diff --git a/pyatlan/version.txt b/pyatlan/version.txt index 4ae7a7206..45acc9e66 100644 --- a/pyatlan/version.txt +++ b/pyatlan/version.txt @@ -1 +1 @@ -9.2.1 \ No newline at end of file +9.2.1 diff --git a/pyatlan_v9/model/assets/__init__.py b/pyatlan_v9/model/assets/__init__.py index 47057d705..ed6cb1d02 100644 --- a/pyatlan_v9/model/assets/__init__.py +++ b/pyatlan_v9/model/assets/__init__.py @@ -1,1019 +1,106 @@ -# Auto-generated by PythonMsgspecRenderer.pkl — DO NOT EDIT +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT # SPDX-License-Identifier: Apache-2.0 # Copyright 2024 Atlan Pte. Ltd. -# isort: skip_file +# +# NOTE: The lazy-loading logic below (_build_lazy_index / __getattr__) is +# intentionally NOT auto-generated — preserve it when regenerating this file. +# It replaces the previous eager _discover_and_export() call which imported +# all ~80 _init_*.py modules at package import time regardless of which types +# were actually needed, causing unnecessary memory usage in apps that only use +# a handful of models. -import lazy_loader as lazy +""" +PyAtlan Models - Auto-generated asset model classes. -from .entity import AtlasClassification, Entity, TermAssignment -from .related_entity import RelatedEntity, SaveSemantic +This module lazily re-exports all types from _init_*.py modules. +New modules are automatically picked up without needing to regenerate this file. 
-__PYATLAN_V9_ASSETS__ = { - "_init_adf": [ - "ADF", - "AdfActivity", - "AdfDataflow", - "AdfDataset", - "AdfLinkedservice", - "AdfPipeline", - "RelatedADF", - "RelatedAdfActivity", - "RelatedAdfDataflow", - "RelatedAdfDataset", - "RelatedAdfLinkedservice", - "RelatedAdfPipeline", - ], - "_init_adls": [ - "ADLS", - "ADLSAccount", - "ADLSContainer", - "ADLSObject", - "RelatedADLS", - "RelatedADLSAccount", - "RelatedADLSContainer", - "RelatedADLSObject", - ], - "_init_ai": [ - "AI", - "AIApplication", - "AIModel", - "AIModelVersion", - "RelatedAI", - "RelatedAIApplication", - "RelatedAIModel", - "RelatedAIModelVersion", - ], - "_init_airflow": [ - "Airflow", - "AirflowDag", - "AirflowTask", - "RelatedAirflow", - "RelatedAirflowDag", - "RelatedAirflowTask", - ], - "_init_anaplan": [ - "Anaplan", - "AnaplanApp", - "AnaplanDimension", - "AnaplanLineItem", - "AnaplanList", - "AnaplanModel", - "AnaplanModule", - "AnaplanPage", - "AnaplanSystemDimension", - "AnaplanView", - "AnaplanWorkspace", - "RelatedAnaplan", - "RelatedAnaplanApp", - "RelatedAnaplanDimension", - "RelatedAnaplanLineItem", - "RelatedAnaplanList", - "RelatedAnaplanModel", - "RelatedAnaplanModule", - "RelatedAnaplanPage", - "RelatedAnaplanSystemDimension", - "RelatedAnaplanView", - "RelatedAnaplanWorkspace", - ], - "_init_anomalo": [ - "Anomalo", - "AnomaloCheck", - "RelatedAnomalo", - "RelatedAnomaloCheck", - ], - "_init_api": [ - "API", - "APIField", - "APIObject", - "APIPath", - "APIQuery", - "APISpec", - "RelatedAPI", - "RelatedAPIField", - "RelatedAPIObject", - "RelatedAPIPath", - "RelatedAPIQuery", - "RelatedAPISpec", - ], - "_init_app": [ - "App", - "Application", - "ApplicationField", - "RelatedApp", - "RelatedApplication", - "RelatedApplicationField", - ], - "_init_app_workflow_run": ["AppWorkflowRun", "RelatedAppWorkflowRun"], - "_init_asset": [ - "Asset", - "DataSet", - "Incident", - "Infrastructure", - "ProcessExecution", - "RelatedAsset", - "RelatedDataSet", - "RelatedIncident", - 
"RelatedInfrastructure", - "RelatedProcessExecution", - ], - "_init_asset_grouping": [ - "AssetGrouping", - "AssetGroupingCollection", - "AssetGroupingStrategy", - "RelatedAssetGrouping", - "RelatedAssetGroupingCollection", - "RelatedAssetGroupingStrategy", - ], - "_init_atlan_app": [ - "AtlanApp", - "AtlanAppDeployment", - "AtlanAppInstalled", - "AtlanAppTool", - "AtlanAppWorkflow", - "RelatedAtlanApp", - "RelatedAtlanAppDeployment", - "RelatedAtlanAppInstalled", - "RelatedAtlanAppTool", - "RelatedAtlanAppWorkflow", - ], - "_init_azure_service_bus": [ - "AzureServiceBus", - "AzureServiceBusNamespace", - "AzureServiceBusSchema", - "AzureServiceBusTopic", - "RelatedAzureServiceBus", - "RelatedAzureServiceBusNamespace", - "RelatedAzureServiceBusSchema", - "RelatedAzureServiceBusTopic", - ], - "_init_bigquery": [ - "BigqueryRoutine", - "RelatedBigqueryRoutine", - "RelatedBigqueryTag", - ], - "_init_business_policy": [ - "BusinessPolicy", - "RelatedBusinessPolicy", - "RelatedBusinessPolicyException", - "RelatedBusinessPolicyIncident", - "RelatedBusinessPolicyLog", - ], - "_init_cassandra": [ - "Cassandra", - "CassandraColumn", - "CassandraIndex", - "CassandraKeyspace", - "CassandraTable", - "CassandraView", - "RelatedCassandra", - "RelatedCassandraColumn", - "RelatedCassandraIndex", - "RelatedCassandraKeyspace", - "RelatedCassandraTable", - "RelatedCassandraView", - ], - "_init_catalog": [ - "BI", - "Catalog", - "EventStore", - "Insight", - "NoSQL", - "ObjectStore", - "RelatedBI", - "RelatedCatalog", - "RelatedEventStore", - "RelatedInsight", - "RelatedNoSQL", - "RelatedObjectStore", - "RelatedSaaS", - "SaaS", - ], - "_init_cloud": [ - "AWS", - "Azure", - "Cloud", - "Google", - "RelatedAWS", - "RelatedAzure", - "RelatedCloud", - "RelatedGoogle", - ], - "_init_cognite": [ - "Cognite", - "Cognite3DModel", - "CogniteAsset", - "CogniteEvent", - "CogniteFile", - "CogniteSequence", - "CogniteTimeSeries", - "RelatedCognite", - "RelatedCognite3DModel", - "RelatedCogniteAsset", 
- "RelatedCogniteEvent", - "RelatedCogniteFile", - "RelatedCogniteSequence", - "RelatedCogniteTimeSeries", - ], - "_init_cognos": [ - "Cognos", - "CognosColumn", - "CognosDashboard", - "CognosDataset", - "CognosDatasource", - "CognosExploration", - "CognosFile", - "CognosFolder", - "CognosModule", - "CognosPackage", - "CognosReport", - "RelatedCognos", - "RelatedCognosColumn", - "RelatedCognosDashboard", - "RelatedCognosDataset", - "RelatedCognosDatasource", - "RelatedCognosExploration", - "RelatedCognosFile", - "RelatedCognosFolder", - "RelatedCognosModule", - "RelatedCognosPackage", - "RelatedCognosReport", - ], - "_init_connection": ["Connection", "RelatedConnection"], - "_init_cosmos_mongo_db": [ - "CosmosMongoDB", - "CosmosMongoDBAccount", - "CosmosMongoDBCollection", - "CosmosMongoDBDatabase", - "RelatedCosmosMongoDB", - "RelatedCosmosMongoDBAccount", - "RelatedCosmosMongoDBCollection", - "RelatedCosmosMongoDBDatabase", - ], - "_init_cube": [ - "Cube", - "CubeDimension", - "CubeField", - "CubeHierarchy", - "MultiDimensionalDataset", - "RelatedCube", - "RelatedCubeDimension", - "RelatedCubeField", - "RelatedCubeHierarchy", - "RelatedMultiDimensionalDataset", - ], - "_init_custom": [ - "Custom", - "CustomEntity", - "RelatedCustom", - "RelatedCustomEntity", - ], - "_init_data_mesh": [ - "DataDomain", - "DataMesh", - "DataProduct", - "RelatedDataDomain", - "RelatedDataMesh", - "RelatedDataProduct", - "RelatedStakeholder", - "RelatedStakeholderTitle", - ], - "_init_data_quality": [ - "DataQuality", - "DataQualityRule", - "DataQualityRuleTemplate", - "Metric", - "RelatedDataQuality", - "RelatedDataQualityRule", - "RelatedDataQualityRuleTemplate", - "RelatedMetric", - ], - "_init_data_studio": [ - "DataStudio", - "DataStudioAsset", - "RelatedDataStudio", - "RelatedDataStudioAsset", - ], - "_init_databricks": [ - "Databricks", - "DatabricksAIModelContext", - "DatabricksAIModelVersion", - "DatabricksExternalLocation", - "DatabricksExternalLocationPath", - 
"DatabricksMetricView", - "DatabricksNotebook", - "DatabricksVolume", - "DatabricksVolumePath", - "RelatedDatabricks", - "RelatedDatabricksAIModelContext", - "RelatedDatabricksAIModelVersion", - "RelatedDatabricksExternalLocation", - "RelatedDatabricksExternalLocationPath", - "RelatedDatabricksMetricView", - "RelatedDatabricksNotebook", - "RelatedDatabricksUnityCatalogTag", - "RelatedDatabricksVolume", - "RelatedDatabricksVolumePath", - ], - "_init_dataverse": [ - "Dataverse", - "DataverseAttribute", - "DataverseEntity", - "RelatedDataverse", - "RelatedDataverseAttribute", - "RelatedDataverseEntity", - ], - "_init_dbt": [ - "Dbt", - "DbtColumnProcess", - "DbtDimension", - "DbtEntity", - "DbtMeasure", - "DbtMetric", - "DbtModel", - "DbtModelColumn", - "DbtProcess", - "DbtSeed", - "DbtSemanticModel", - "DbtSource", - "DbtTag", - "DbtTest", - "RelatedDbt", - "RelatedDbtColumnProcess", - "RelatedDbtDimension", - "RelatedDbtEntity", - "RelatedDbtMeasure", - "RelatedDbtMetric", - "RelatedDbtModel", - "RelatedDbtModelColumn", - "RelatedDbtProcess", - "RelatedDbtSeed", - "RelatedDbtSemanticModel", - "RelatedDbtSource", - "RelatedDbtTag", - "RelatedDbtTest", - ], - "_init_document_db": [ - "DocumentDB", - "DocumentDBCollection", - "DocumentDBDatabase", - "RelatedDocumentDB", - "RelatedDocumentDBCollection", - "RelatedDocumentDBDatabase", - ], - "_init_domo": [ - "Domo", - "DomoCard", - "DomoDashboard", - "DomoDataset", - "DomoDatasetColumn", - "RelatedDomo", - "RelatedDomoCard", - "RelatedDomoDashboard", - "RelatedDomoDataset", - "RelatedDomoDatasetColumn", - ], - "_init_dremio": [ - "Dremio", - "DremioColumn", - "DremioFolder", - "DremioPhysicalDataset", - "DremioSource", - "DremioSpace", - "DremioVirtualDataset", - "RelatedDremio", - "RelatedDremioColumn", - "RelatedDremioFolder", - "RelatedDremioPhysicalDataset", - "RelatedDremioSource", - "RelatedDremioSpace", - "RelatedDremioVirtualDataset", - ], - "_init_dynamo_db": [ - "DynamoDB", - "DynamoDBAttribute", - 
"DynamoDBSecondaryIndex", - "DynamoDBTable", - "RelatedDynamoDB", - "RelatedDynamoDBAttribute", - "RelatedDynamoDBGlobalSecondaryIndex", - "RelatedDynamoDBLocalSecondaryIndex", - "RelatedDynamoDBSecondaryIndex", - "RelatedDynamoDBTable", - ], - "_init_fabric": [ - "Fabric", - "FabricActivity", - "FabricDashboard", - "FabricDataPipeline", - "FabricDataflow", - "FabricDataflowEntityColumn", - "FabricPage", - "FabricReport", - "FabricSemanticModel", - "FabricSemanticModelTable", - "FabricSemanticModelTableColumn", - "FabricVisual", - "FabricWorkspace", - "RelatedFabric", - "RelatedFabricActivity", - "RelatedFabricDashboard", - "RelatedFabricDataPipeline", - "RelatedFabricDataflow", - "RelatedFabricDataflowEntityColumn", - "RelatedFabricPage", - "RelatedFabricReport", - "RelatedFabricSemanticModel", - "RelatedFabricSemanticModelTable", - "RelatedFabricSemanticModelTableColumn", - "RelatedFabricVisual", - "RelatedFabricWorkspace", - ], - "_init_fivetran": [ - "Fivetran", - "FivetranConnector", - "RelatedFivetran", - "RelatedFivetranConnector", - ], - "_init_flow": [ - "Flow", - "FlowControlOperation", - "FlowDataset", - "FlowDatasetOperation", - "FlowField", - "FlowFieldOperation", - "FlowFolder", - "FlowProject", - "FlowReusableUnit", - "RelatedFlow", - "RelatedFlowControlOperation", - "RelatedFlowDataset", - "RelatedFlowDatasetOperation", - "RelatedFlowField", - "RelatedFlowFieldOperation", - "RelatedFlowFolder", - "RelatedFlowProject", - "RelatedFlowReusableUnit", - ], - "_init_form": ["Form", "RelatedForm", "RelatedResponse"], - "_init_gcs": [ - "GCS", - "GCSBucket", - "GCSObject", - "RelatedGCS", - "RelatedGCSBucket", - "RelatedGCSObject", - ], - "_init_gtc": [ - "AtlasGlossary", - "AtlasGlossaryCategory", - "AtlasGlossaryTerm", - "RelatedAtlasGlossary", - "RelatedAtlasGlossaryCategory", - "RelatedAtlasGlossaryTerm", - ], - "_init_iceberg": [ - "Iceberg", - "IcebergCatalog", - "IcebergColumn", - "IcebergNamespace", - "IcebergTable", - "RelatedIceberg", - 
"RelatedIcebergCatalog", - "RelatedIcebergColumn", - "RelatedIcebergNamespace", - "RelatedIcebergTable", - ], - "_init_kafka": [ - "Kafka", - "KafkaConsumerGroup", - "KafkaTopic", - "RelatedAzureEventHub", - "RelatedAzureEventHubConsumerGroup", - "RelatedKafka", - "RelatedKafkaConsumerGroup", - "RelatedKafkaTopic", - ], - "_init_looker": [ - "Looker", - "LookerDashboard", - "LookerExplore", - "LookerField", - "LookerFolder", - "LookerLook", - "LookerModel", - "LookerProject", - "LookerQuery", - "LookerTile", - "LookerView", - "RelatedLooker", - "RelatedLookerDashboard", - "RelatedLookerExplore", - "RelatedLookerField", - "RelatedLookerFolder", - "RelatedLookerLook", - "RelatedLookerModel", - "RelatedLookerProject", - "RelatedLookerQuery", - "RelatedLookerTile", - "RelatedLookerView", - ], - "_init_manual": [ - "AccessControl", - "AuthPolicy", - "AzureEventHub", - "AzureEventHubConsumerGroup", - "Badge", - "BadgeCondition", - "Cognite3DModel", - "DataContract", - "Persona", - "Purpose", - "RelatedSuperset", - "RelatedSupersetChart", - "RelatedSupersetDashboard", - "RelatedSupersetDataset", - "SnowflakeDynamicTable", - "Superset", - "SupersetChart", - "SupersetDashboard", - "SupersetDataset", - ], - "_init_matillion": [ - "Matillion", - "MatillionComponent", - "MatillionGroup", - "MatillionJob", - "MatillionProject", - "RelatedMatillion", - "RelatedMatillionComponent", - "RelatedMatillionGroup", - "RelatedMatillionJob", - "RelatedMatillionProject", - ], - "_init_metabase": [ - "Metabase", - "MetabaseCollection", - "MetabaseDashboard", - "MetabaseQuestion", - "RelatedMetabase", - "RelatedMetabaseCollection", - "RelatedMetabaseDashboard", - "RelatedMetabaseQuestion", - ], - "_init_micro_strategy": [ - "MicroStrategy", - "MicroStrategyAttribute", - "MicroStrategyColumn", - "MicroStrategyCube", - "MicroStrategyDocument", - "MicroStrategyDossier", - "MicroStrategyFact", - "MicroStrategyMetric", - "MicroStrategyProject", - "MicroStrategyReport", - 
"MicroStrategyVisualization", - "RelatedMicroStrategy", - "RelatedMicroStrategyAttribute", - "RelatedMicroStrategyColumn", - "RelatedMicroStrategyCube", - "RelatedMicroStrategyDocument", - "RelatedMicroStrategyDossier", - "RelatedMicroStrategyFact", - "RelatedMicroStrategyMetric", - "RelatedMicroStrategyProject", - "RelatedMicroStrategyReport", - "RelatedMicroStrategyVisualization", - ], - "_init_mode": [ - "Mode", - "ModeChart", - "ModeCollection", - "ModeQuery", - "ModeReport", - "ModeWorkspace", - "RelatedMode", - "RelatedModeChart", - "RelatedModeCollection", - "RelatedModeQuery", - "RelatedModeReport", - "RelatedModeWorkspace", - ], - "_init_model": [ - "Model", - "ModelAttribute", - "ModelAttributeAssociation", - "ModelDataModel", - "ModelEntity", - "ModelEntityAssociation", - "ModelVersion", - "RelatedModel", - "RelatedModelAttribute", - "RelatedModelAttributeAssociation", - "RelatedModelDataModel", - "RelatedModelEntity", - "RelatedModelEntityAssociation", - "RelatedModelVersion", - ], - "_init_mongo_db": [ - "MongoDB", - "MongoDBCollection", - "MongoDBDatabase", - "RelatedMongoDB", - "RelatedMongoDBCollection", - "RelatedMongoDBDatabase", - ], - "_init_monte_carlo": [ - "MCIncident", - "MCMonitor", - "MonteCarlo", - "RelatedMCIncident", - "RelatedMCMonitor", - "RelatedMonteCarlo", - ], - "_init_namespace": [ - "Collection", - "Folder", - "Namespace", - "RelatedCollection", - "RelatedFolder", - "RelatedNamespace", - ], - "_init_notebook": ["Notebook", "RelatedNotebook"], - "_init_partial": [ - "Partial", - "PartialField", - "PartialObject", - "RelatedPartial", - "RelatedPartialField", - "RelatedPartialObject", - ], - "_init_power_bi": [ - "PowerBI", - "PowerBIApp", - "PowerBIColumn", - "PowerBIDashboard", - "PowerBIDataflow", - "PowerBIDataflowEntityColumn", - "PowerBIDataset", - "PowerBIDatasource", - "PowerBIMeasure", - "PowerBIPage", - "PowerBIReport", - "PowerBITable", - "PowerBITile", - "PowerBIWorkspace", - "RelatedPowerBI", - "RelatedPowerBIApp", - 
"RelatedPowerBIColumn", - "RelatedPowerBIDashboard", - "RelatedPowerBIDataflow", - "RelatedPowerBIDataflowEntityColumn", - "RelatedPowerBIDataset", - "RelatedPowerBIDatasource", - "RelatedPowerBIMeasure", - "RelatedPowerBIPage", - "RelatedPowerBIReport", - "RelatedPowerBITable", - "RelatedPowerBITile", - "RelatedPowerBIWorkspace", - ], - "_init_preset": [ - "Preset", - "PresetChart", - "PresetDashboard", - "PresetDataset", - "PresetWorkspace", - "RelatedPreset", - "RelatedPresetChart", - "RelatedPresetDashboard", - "RelatedPresetDataset", - "RelatedPresetWorkspace", - ], - "_init_process": [ - "BIProcess", - "ColumnProcess", - "Process", - "RelatedBIProcess", - "RelatedColumnProcess", - "RelatedConnectionProcess", - "RelatedProcess", - ], - "_init_qlik": [ - "Qlik", - "QlikApp", - "QlikChart", - "QlikColumn", - "QlikDataset", - "QlikSheet", - "QlikSpace", - "RelatedQlik", - "RelatedQlikApp", - "RelatedQlikChart", - "RelatedQlikColumn", - "RelatedQlikDataset", - "RelatedQlikSheet", - "RelatedQlikSpace", - "RelatedQlikStream", - ], - "_init_quick_sight": [ - "QuickSight", - "QuickSightAnalysis", - "QuickSightAnalysisVisual", - "QuickSightDashboard", - "QuickSightDashboardVisual", - "QuickSightDataset", - "QuickSightDatasetField", - "QuickSightFolder", - "RelatedQuickSight", - "RelatedQuickSightAnalysis", - "RelatedQuickSightAnalysisVisual", - "RelatedQuickSightDashboard", - "RelatedQuickSightDashboardVisual", - "RelatedQuickSightDataset", - "RelatedQuickSightDatasetField", - "RelatedQuickSightFolder", - ], - "_init_redash": [ - "Redash", - "RedashDashboard", - "RedashQuery", - "RedashVisualization", - "RelatedRedash", - "RelatedRedashDashboard", - "RelatedRedashQuery", - "RelatedRedashVisualization", - ], - "_init_referenceable": ["Referenceable", "RelatedReferenceable"], - "_init_resource": [ - "File", - "Link", - "Readme", - "ReadmeTemplate", - "RelatedBadge", - "RelatedFile", - "RelatedLink", - "RelatedReadme", - "RelatedReadmeTemplate", - "RelatedResource", - 
"Related__internal", - "Resource", - ], - "_init_s3": [ - "RelatedS3", - "RelatedS3Bucket", - "RelatedS3Object", - "RelatedS3Prefix", - "S3", - "S3Bucket", - "S3Object", - "S3Prefix", - ], - "_init_sage_maker": [ - "RelatedSageMaker", - "RelatedSageMakerFeature", - "RelatedSageMakerFeatureGroup", - "RelatedSageMakerModel", - "RelatedSageMakerModelDeployment", - "RelatedSageMakerModelGroup", - "SageMaker", - "SageMakerFeature", - "SageMakerFeatureGroup", - "SageMakerModel", - "SageMakerModelDeployment", - "SageMakerModelGroup", - ], - "_init_sage_maker_unified_studio": [ - "RelatedSageMakerUnifiedStudio", - "RelatedSageMakerUnifiedStudioAsset", - "RelatedSageMakerUnifiedStudioAssetSchema", - "RelatedSageMakerUnifiedStudioProject", - "RelatedSageMakerUnifiedStudioPublishedAsset", - "RelatedSageMakerUnifiedStudioSubscribedAsset", - "SageMakerUnifiedStudio", - "SageMakerUnifiedStudioAsset", - "SageMakerUnifiedStudioAssetSchema", - "SageMakerUnifiedStudioProject", - "SageMakerUnifiedStudioPublishedAsset", - "SageMakerUnifiedStudioSubscribedAsset", - ], - "_init_salesforce": [ - "RelatedSalesforce", - "RelatedSalesforceDashboard", - "RelatedSalesforceField", - "RelatedSalesforceObject", - "RelatedSalesforceOrganization", - "RelatedSalesforceReport", - "Salesforce", - "SalesforceDashboard", - "SalesforceField", - "SalesforceObject", - "SalesforceOrganization", - "SalesforceReport", - ], - "_init_sap": [ - "RelatedSAP", - "RelatedSapErpAbapProgram", - "RelatedSapErpCdsView", - "RelatedSapErpColumn", - "RelatedSapErpComponent", - "RelatedSapErpFunctionModule", - "RelatedSapErpTable", - "RelatedSapErpTransactionCode", - "RelatedSapErpView", - "SAP", - "SapErpAbapProgram", - "SapErpCdsView", - "SapErpColumn", - "SapErpComponent", - "SapErpFunctionModule", - "SapErpTable", - "SapErpTransactionCode", - "SapErpView", - ], - "_init_schema_registry": [ - "RelatedSchemaRegistry", - "RelatedSchemaRegistrySubject", - "SchemaRegistry", - "SchemaRegistrySubject", - ], - 
"_init_semantic": [ - "RelatedSemantic", - "RelatedSemanticDimension", - "RelatedSemanticEntity", - "RelatedSemanticField", - "RelatedSemanticMeasure", - "RelatedSemanticModel", - "Semantic", - "SemanticDimension", - "SemanticEntity", - "SemanticField", - "SemanticMeasure", - "SemanticModel", - ], - "_init_sigma": [ - "RelatedSigma", - "RelatedSigmaDataElement", - "RelatedSigmaDataElementField", - "RelatedSigmaDataset", - "RelatedSigmaDatasetColumn", - "RelatedSigmaPage", - "RelatedSigmaWorkbook", - "Sigma", - "SigmaDataElement", - "SigmaDataElementField", - "SigmaDataset", - "SigmaDatasetColumn", - "SigmaPage", - "SigmaWorkbook", - ], - "_init_sisense": [ - "RelatedSisense", - "RelatedSisenseDashboard", - "RelatedSisenseDatamodel", - "RelatedSisenseDatamodelTable", - "RelatedSisenseFolder", - "RelatedSisenseWidget", - "Sisense", - "SisenseDashboard", - "SisenseDatamodel", - "SisenseDatamodelTable", - "SisenseFolder", - "SisenseWidget", - ], - "_init_snowflake": [ - "RelatedSnowflake", - "RelatedSnowflakeAIModelContext", - "RelatedSnowflakeAIModelVersion", - "RelatedSnowflakeDynamicTable", - "RelatedSnowflakePipe", - "RelatedSnowflakeSemanticDimension", - "RelatedSnowflakeSemanticFact", - "RelatedSnowflakeSemanticLogicalTable", - "RelatedSnowflakeSemanticMetric", - "RelatedSnowflakeSemanticView", - "RelatedSnowflakeStage", - "RelatedSnowflakeStream", - "RelatedSnowflakeTag", - "Snowflake", - "SnowflakeAIModelContext", - "SnowflakeAIModelVersion", - "SnowflakeSemanticDimension", - "SnowflakeSemanticFact", - "SnowflakeSemanticLogicalTable", - "SnowflakeSemanticMetric", - "SnowflakeSemanticView", - ], - "_init_soda": [ - "RelatedSoda", - "RelatedSodaCheck", - "Soda", - "SodaCheck", - ], - "_init_spark": [ - "RelatedSpark", - "RelatedSparkJob", - "Spark", - "SparkJob", - ], - "_init_sql": [ - "CalculationView", - "Column", - "Database", - "Function", - "MaterialisedView", - "Procedure", - "Query", - "RelatedCalculationView", - "RelatedColumn", - "RelatedDatabase", - 
"RelatedFunction", - "RelatedMaterialisedView", - "RelatedProcedure", - "RelatedQuery", - "RelatedSQL", - "RelatedSchema", - "RelatedTable", - "RelatedTablePartition", - "RelatedView", - "SQL", - "Schema", - "Table", - "TablePartition", - "View", - ], - "_init_starburst": [ - "RelatedStarburst", - "RelatedStarburstDataset", - "RelatedStarburstDatasetColumn", - "Starburst", - "StarburstDataset", - "StarburstDatasetColumn", - ], - "_init_tableau": [ - "RelatedTableau", - "RelatedTableauCalculatedField", - "RelatedTableauDashboard", - "RelatedTableauDashboardField", - "RelatedTableauDatasource", - "RelatedTableauDatasourceField", - "RelatedTableauFlow", - "RelatedTableauMetric", - "RelatedTableauProject", - "RelatedTableauSite", - "RelatedTableauWorkbook", - "RelatedTableauWorksheet", - "RelatedTableauWorksheetField", - "Tableau", - "TableauCalculatedField", - "TableauDashboard", - "TableauDashboardField", - "TableauDatasource", - "TableauDatasourceField", - "TableauFlow", - "TableauMetric", - "TableauProject", - "TableauSite", - "TableauWorkbook", - "TableauWorksheet", - "TableauWorksheetField", - ], - "_init_tag": [ - "RelatedSourceTag", - "RelatedTag", - "RelatedTagAttachment", - "SourceTag", - "Tag", - ], - "_init_task": ["RelatedTask", "Task"], - "_init_thoughtspot": [ - "RelatedThoughtspot", - "RelatedThoughtspotAnswer", - "RelatedThoughtspotColumn", - "RelatedThoughtspotDashlet", - "RelatedThoughtspotLiveboard", - "RelatedThoughtspotTable", - "RelatedThoughtspotView", - "RelatedThoughtspotWorksheet", - "Thoughtspot", - "ThoughtspotAnswer", - "ThoughtspotColumn", - "ThoughtspotDashlet", - "ThoughtspotLiveboard", - "ThoughtspotTable", - "ThoughtspotView", - "ThoughtspotWorksheet", - ], - "_init_workflow": ["RelatedWorkflow", "RelatedWorkflowRun", "Workflow"], -} +Direct submodule imports are preferred and have zero overhead at import time:: -__getattr__, __dir__, __all__ = lazy.attach( - __name__, submod_attrs=__PYATLAN_V9_ASSETS__ -) + from pyatlan.models.column 
import Column  # preferred — zero overhead

-__all__ += [
-    "AtlasClassification",
+Package-level imports also work and only load the specific module needed::
+
+    from pyatlan_v9.model.assets import Column  # lazy — only _init_sql.py loaded
+"""
+
+from __future__ import annotations
+
+import ast
+import importlib
+import pkgutil
+from pathlib import Path
+from typing import Any
+
+# Base classes are always exported eagerly (they have no large transitive deps).
+from .entity import AtlasClassification, Entity, TermAssignment
+from .related_entity import RelatedEntity, SaveSemantic
+
+__all__ = [
+    # Base classes
     "Entity",
+    "AtlasClassification",
     "TermAssignment",
     "RelatedEntity",
     "SaveSemantic",
 ]
+
+# Lazy index: class name → _init_module_name.
+# Built once on first __getattr__ call via AST scanning (no module execution).
+_lazy_index: dict[str, str] | None = None
+
+
+def _build_lazy_index() -> dict[str, str]:
+    """Scan _init_*.py __all__ lists via AST to build a name→module map.
+
+    Uses ast.parse() so no model modules are executed or imported — only their
+    source text is read to extract the __all__ list.
+    """
+    index: dict[str, str] = {}
+    current_dir = Path(__file__).parent
+    for module_info in pkgutil.iter_modules([str(current_dir)]):
+        if not module_info.name.startswith("_init_"):
+            continue
+        module_file = current_dir / f"{module_info.name}.py"
+        try:
+            tree = ast.parse(module_file.read_bytes())
+        except SyntaxError:
+            continue
+        for node in ast.walk(tree):
+            if isinstance(node, ast.Assign):
+                for target in node.targets:
+                    if isinstance(target, ast.Name) and target.id == "__all__":
+                        if isinstance(node.value, (ast.List, ast.Tuple)):
+                            for elt in node.value.elts:
+                                if isinstance(elt, ast.Constant) and isinstance(
+                                    elt.value, str
+                                ):
+                                    index[elt.value] = module_info.name
+    return index
+
+
+def __getattr__(name: str) -> Any:
+    """Lazy import of model classes on first access.
+
+    Called by Python when an attribute is not found in this module's globals.
+ Enables ``from pyatlan.models import Column`` without eagerly importing all + model modules at package import time. + + The result is cached in globals() so subsequent accesses are O(1) dict lookups + and never go through __getattr__ again. + """ + global _lazy_index + if _lazy_index is None: + _lazy_index = _build_lazy_index() + + module_name = _lazy_index.get(name) + if module_name is not None: + module = importlib.import_module(f".{module_name}", __package__) + val = getattr(module, name) + # Cache in globals so future accesses bypass __getattr__ entirely. + globals()[name] = val + if name not in __all__: + __all__.append(name) + return val + + raise AttributeError(f"module 'pyatlan.models' has no attribute {name!r}") diff --git a/pyatlan_v9/model/assets/adf.py b/pyatlan_v9/model/assets/adf.py index d4ed6f95f..18d85437a 100644 --- a/pyatlan_v9/model/assets/adf.py +++ b/pyatlan_v9/model/assets/adf.py @@ -25,7 +25,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .adf_related import RelatedADF from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -93,6 +92,8 @@ class ADF(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADF" + adf_factory_name: Union[str, None, UnsetType] = UNSET """Defines the name of the factory in which this asset exists.""" @@ -194,66 +195,6 @@ class ADF(Asset): def __post_init__(self) -> None: self.type_name = "ADF" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADF instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADF validation failed: {errors}") - - def minimize(self) -> "ADF": - """ - Return a minimal copy of this ADF with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADF with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADF instance with only the minimum required fields. - """ - self.validate() - return ADF(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADF": - """ - Create a :class:`RelatedADF` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADF reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADF(guid=self.guid) - return RelatedADF(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +447,6 @@ def _adf_to_nested(adf: ADF) -> ADFNested: is_incomplete=adf.is_incomplete, provenance_type=adf.provenance_type, home_id=adf.home_id, - depth=adf.depth, - immediate_upstream=adf.immediate_upstream, - immediate_downstream=adf.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -538,6 +476,7 @@ def _adf_from_nested(nested: ADFNested) -> ADF: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -546,9 +485,6 @@ def _adf_from_nested(nested: ADFNested) -> ADF: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adf_activity.py b/pyatlan_v9/model/assets/adf_activity.py index de1d64334..53731e801 100644 --- a/pyatlan_v9/model/assets/adf_activity.py +++ b/pyatlan_v9/model/assets/adf_activity.py @@ -27,7 +27,6 @@ from pyatlan_v9.model.transform import register_asset from .adf_related import ( - RelatedAdfActivity, RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, @@ -124,6 +123,8 @@ class AdfActivity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfActivity" + 
adf_activity_type: Union[str, None, UnsetType] = UNSET """The type of the ADF activity.""" @@ -303,74 +304,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfActivity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adf_pipeline is UNSET: - errors.append("adf_pipeline is required for creation") - if self.adf_pipeline_qualified_name is UNSET: - errors.append("adf_pipeline_qualified_name is required for creation") - if errors: - raise ValueError(f"AdfActivity validation failed: {errors}") - - def minimize(self) -> "AdfActivity": - """ - Return a minimal copy of this AdfActivity with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfActivity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfActivity instance with only the minimum required fields. - """ - self.validate() - return AdfActivity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfActivity": - """ - Create a :class:`RelatedAdfActivity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfActivity reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAdfActivity(guid=self.guid) - return RelatedAdfActivity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -748,9 +681,6 @@ def _adf_activity_to_nested(adf_activity: AdfActivity) -> AdfActivityNested: is_incomplete=adf_activity.is_incomplete, provenance_type=adf_activity.provenance_type, home_id=adf_activity.home_id, - depth=adf_activity.depth, - immediate_upstream=adf_activity.immediate_upstream, - immediate_downstream=adf_activity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -782,6 +712,7 @@ def _adf_activity_from_nested(nested: AdfActivityNested) -> AdfActivity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -790,9 +721,6 @@ def _adf_activity_from_nested(nested: AdfActivityNested) -> AdfActivity: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_activity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adf_dataflow.py b/pyatlan_v9/model/assets/adf_dataflow.py index 5cd01df9b..8d5857547 100644 --- a/pyatlan_v9/model/assets/adf_dataflow.py +++ b/pyatlan_v9/model/assets/adf_dataflow.py @@ -27,7 +27,6 @@ from .adf_related import ( RelatedAdfActivity, - RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, RelatedAdfPipeline, @@ -106,6 +105,8 @@ class AdfDataflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfDataflow" + adf_dataflow_sources: Union[List[str], None, UnsetType] = UNSET """The list of names of sources for this dataflow.""" @@ -228,66 +229,6 @@ class AdfDataflow(Asset): def __post_init__(self) -> None: self.type_name = "AdfDataflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfDataflow validation failed: {errors}") - - def minimize(self) -> "AdfDataflow": - """ - Return a minimal copy of this AdfDataflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfDataflow instance with only the minimum required fields. - """ - self.validate() - return AdfDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfDataflow": - """ - Create a :class:`RelatedAdfDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfDataflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfDataflow(guid=self.guid) - return RelatedAdfDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -577,9 +518,6 @@ def _adf_dataflow_to_nested(adf_dataflow: AdfDataflow) -> AdfDataflowNested: is_incomplete=adf_dataflow.is_incomplete, provenance_type=adf_dataflow.provenance_type, home_id=adf_dataflow.home_id, - depth=adf_dataflow.depth, - immediate_upstream=adf_dataflow.immediate_upstream, - immediate_downstream=adf_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -611,6 +549,7 @@ def _adf_dataflow_from_nested(nested: AdfDataflowNested) -> AdfDataflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -619,9 +558,6 @@ def _adf_dataflow_from_nested(nested: AdfDataflowNested) -> AdfDataflow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adf_dataset.py b/pyatlan_v9/model/assets/adf_dataset.py index 3b37dd671..f4eb64688 100644 --- a/pyatlan_v9/model/assets/adf_dataset.py +++ b/pyatlan_v9/model/assets/adf_dataset.py @@ -28,7 +28,6 @@ from .adf_related import ( RelatedAdfActivity, RelatedAdfDataflow, - RelatedAdfDataset, RelatedAdfLinkedservice, RelatedAdfPipeline, ) @@ -114,6 +113,8 @@ class AdfDataset(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfDataset" + adf_dataset_type: Union[str, None, UnsetType] = UNSET """Defines the type of the dataset.""" @@ -260,66 +261,6 @@ class AdfDataset(Asset): def __post_init__(self) -> None: self.type_name = "AdfDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfDataset validation failed: {errors}") - - def minimize(self) -> "AdfDataset": - """ - Return a minimal copy of this AdfDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfDataset instance with only the minimum required fields. 
- """ - self.validate() - return AdfDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfDataset": - """ - Create a :class:`RelatedAdfDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAdfDataset(guid=self.guid) - return RelatedAdfDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -647,9 +588,6 @@ def _adf_dataset_to_nested(adf_dataset: AdfDataset) -> AdfDatasetNested: is_incomplete=adf_dataset.is_incomplete, provenance_type=adf_dataset.provenance_type, home_id=adf_dataset.home_id, - depth=adf_dataset.depth, - immediate_upstream=adf_dataset.immediate_upstream, - immediate_downstream=adf_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -681,6 +619,7 @@ def _adf_dataset_from_nested(nested: AdfDatasetNested) -> AdfDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -689,9 +628,6 @@ def _adf_dataset_from_nested(nested: AdfDatasetNested) -> AdfDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/adf_linkedservice.py b/pyatlan_v9/model/assets/adf_linkedservice.py index eccb46c06..a467282b9 100644 --- a/pyatlan_v9/model/assets/adf_linkedservice.py +++ b/pyatlan_v9/model/assets/adf_linkedservice.py @@ -29,7 +29,6 @@ RelatedAdfActivity, RelatedAdfDataflow, RelatedAdfDataset, - RelatedAdfLinkedservice, RelatedAdfPipeline, ) from .airflow_related import RelatedAirflowTask @@ -118,6 +117,8 @@ class AdfLinkedservice(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfLinkedservice" + adf_linkedservice_type: Union[str, None, UnsetType] = UNSET """Defines the type of the linked service.""" @@ -276,66 +277,6 @@ class AdfLinkedservice(Asset): def __post_init__(self) -> None: self.type_name = "AdfLinkedservice" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfLinkedservice instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfLinkedservice validation failed: {errors}") - - def minimize(self) -> "AdfLinkedservice": - """ - Return a minimal copy of this AdfLinkedservice with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfLinkedservice with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfLinkedservice instance with only the minimum required fields. - """ - self.validate() - return AdfLinkedservice(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfLinkedservice": - """ - Create a :class:`RelatedAdfLinkedservice` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfLinkedservice reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfLinkedservice(guid=self.guid) - return RelatedAdfLinkedservice(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -699,9 +640,6 @@ def _adf_linkedservice_to_nested( is_incomplete=adf_linkedservice.is_incomplete, provenance_type=adf_linkedservice.provenance_type, home_id=adf_linkedservice.home_id, - depth=adf_linkedservice.depth, - immediate_upstream=adf_linkedservice.immediate_upstream, - immediate_downstream=adf_linkedservice.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -735,6 +673,7 @@ def _adf_linkedservice_from_nested(nested: AdfLinkedserviceNested) -> AdfLinkeds updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -743,9 +682,6 @@ def _adf_linkedservice_from_nested(nested: AdfLinkedserviceNested) -> AdfLinkeds is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_linkedservice_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adf_pipeline.py b/pyatlan_v9/model/assets/adf_pipeline.py index 76f7955bd..0530e947b 100644 --- a/pyatlan_v9/model/assets/adf_pipeline.py +++ b/pyatlan_v9/model/assets/adf_pipeline.py @@ -30,7 +30,6 @@ RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, - RelatedAdfPipeline, ) from .airflow_related import RelatedAirflowTask from .anomalo_related import 
RelatedAnomaloCheck @@ -106,6 +105,8 @@ class AdfPipeline(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfPipeline" + adf_pipeline_activity_count: Union[int, None, UnsetType] = UNSET """Defines the count of activities in the pipline.""" @@ -228,66 +229,6 @@ class AdfPipeline(Asset): def __post_init__(self) -> None: self.type_name = "AdfPipeline" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfPipeline instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfPipeline validation failed: {errors}") - - def minimize(self) -> "AdfPipeline": - """ - Return a minimal copy of this AdfPipeline with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfPipeline with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new AdfPipeline instance with only the minimum required fields. - """ - self.validate() - return AdfPipeline(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfPipeline": - """ - Create a :class:`RelatedAdfPipeline` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfPipeline reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAdfPipeline(guid=self.guid) - return RelatedAdfPipeline(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -577,9 +518,6 @@ def _adf_pipeline_to_nested(adf_pipeline: AdfPipeline) -> AdfPipelineNested: is_incomplete=adf_pipeline.is_incomplete, provenance_type=adf_pipeline.provenance_type, home_id=adf_pipeline.home_id, - depth=adf_pipeline.depth, - immediate_upstream=adf_pipeline.immediate_upstream, - immediate_downstream=adf_pipeline.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -611,6 +549,7 @@ def _adf_pipeline_from_nested(nested: AdfPipelineNested) -> AdfPipeline: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -619,9 +558,6 @@ def _adf_pipeline_from_nested(nested: AdfPipelineNested) -> AdfPipeline: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_adf_pipeline_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adls.py b/pyatlan_v9/model/assets/adls.py index d742b2695..c58ca05fa 100644 --- a/pyatlan_v9/model/assets/adls.py +++ b/pyatlan_v9/model/assets/adls.py @@ -25,7 +25,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .adls_related import RelatedADLS from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -98,6 +97,8 @@ class ADLS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLS" + adls_account_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the account for this ADLS asset.""" @@ -214,66 +215,6 @@ class ADLS(Asset): def __post_init__(self) -> None: self.type_name = "ADLS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADLS validation failed: {errors}") - - def minimize(self) -> "ADLS": - """ - Return a minimal copy of this ADLS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLS instance with only the minimum required fields. - """ - self.validate() - return ADLS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLS": - """ - Create a :class:`RelatedADLS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADLS(guid=self.guid) - return RelatedADLS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -551,9 +492,6 @@ def _adls_to_nested(adls: ADLS) -> ADLSNested: is_incomplete=adls.is_incomplete, provenance_type=adls.provenance_type, home_id=adls.home_id, - depth=adls.depth, - immediate_upstream=adls.immediate_upstream, - immediate_downstream=adls.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -583,6 +521,7 @@ def _adls_from_nested(nested: ADLSNested) -> ADLS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -591,9 +530,6 @@ def _adls_from_nested(nested: ADLSNested) -> ADLS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adls_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adls_account.py b/pyatlan_v9/model/assets/adls_account.py index 7bedd813f..47e676533 100644 --- a/pyatlan_v9/model/assets/adls_account.py +++ b/pyatlan_v9/model/assets/adls_account.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSAccount, RelatedADLSContainer +from .adls_related import RelatedADLSContainer from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck 
from .app_related import RelatedApplication, RelatedApplicationField @@ -111,6 +111,8 @@ class ADLSAccount(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSAccount" + adls_etag: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="adlsETag" ) @@ -262,66 +264,6 @@ class ADLSAccount(Asset): def __post_init__(self) -> None: self.type_name = "ADLSAccount" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSAccount instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADLSAccount validation failed: {errors}") - - def minimize(self) -> "ADLSAccount": - """ - Return a minimal copy of this ADLSAccount with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSAccount with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new ADLSAccount instance with only the minimum required fields. - """ - self.validate() - return ADLSAccount(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSAccount": - """ - Create a :class:`RelatedADLSAccount` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSAccount reference to this asset. - """ - if self.guid is not UNSET: - return RelatedADLSAccount(guid=self.guid) - return RelatedADLSAccount(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -693,9 +635,6 @@ def _adls_account_to_nested(adls_account: ADLSAccount) -> ADLSAccountNested: is_incomplete=adls_account.is_incomplete, provenance_type=adls_account.provenance_type, home_id=adls_account.home_id, - depth=adls_account.depth, - immediate_upstream=adls_account.immediate_upstream, - immediate_downstream=adls_account.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -727,6 +666,7 @@ def _adls_account_from_nested(nested: ADLSAccountNested) -> ADLSAccount: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -735,9 +675,6 @@ def _adls_account_from_nested(nested: ADLSAccountNested) -> ADLSAccount: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adls_account_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adls_container.py 
b/pyatlan_v9/model/assets/adls_container.py index 3c6b23eb7..20be6c36e 100644 --- a/pyatlan_v9/model/assets/adls_container.py +++ b/pyatlan_v9/model/assets/adls_container.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSAccount, RelatedADLSContainer, RelatedADLSObject +from .adls_related import RelatedADLSAccount, RelatedADLSObject from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -108,6 +108,8 @@ class ADLSContainer(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSContainer" + adls_container_url: Union[str, None, UnsetType] = UNSET """URL of this container.""" @@ -256,76 +258,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSContainer instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adls_account is UNSET: - errors.append("adls_account is required for creation") - if self.adls_account_name is UNSET: - errors.append("adls_account_name is required for creation") - if self.adls_account_qualified_name is UNSET: - errors.append("adls_account_qualified_name is required for creation") - if errors: - raise ValueError(f"ADLSContainer validation failed: {errors}") - - def minimize(self) -> "ADLSContainer": - """ - Return a minimal copy of this ADLSContainer with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSContainer with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLSContainer instance with only the minimum required fields. - """ - self.validate() - return ADLSContainer(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSContainer": - """ - Create a :class:`RelatedADLSContainer` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSContainer reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADLSContainer(guid=self.guid) - return RelatedADLSContainer(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -701,9 +633,6 @@ def _adls_container_to_nested(adls_container: ADLSContainer) -> ADLSContainerNes is_incomplete=adls_container.is_incomplete, provenance_type=adls_container.provenance_type, home_id=adls_container.home_id, - depth=adls_container.depth, - immediate_upstream=adls_container.immediate_upstream, - immediate_downstream=adls_container.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -737,6 +666,7 @@ def _adls_container_from_nested(nested: ADLSContainerNested) -> ADLSContainer: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -745,9 +675,6 @@ def _adls_container_from_nested(nested: ADLSContainerNested) -> ADLSContainer: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adls_container_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/adls_object.py b/pyatlan_v9/model/assets/adls_object.py index 3623322f4..a40df7e7e 100644 --- a/pyatlan_v9/model/assets/adls_object.py +++ b/pyatlan_v9/model/assets/adls_object.py @@ -29,7 +29,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSContainer, RelatedADLSObject +from .adls_related import RelatedADLSContainer from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from 
.app_related import RelatedApplication, RelatedApplicationField @@ -122,6 +122,8 @@ class ADLSObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSObject" + adls_object_url: Union[str, None, UnsetType] = UNSET """URL of this object.""" @@ -308,80 +310,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adls_container is UNSET: - errors.append("adls_container is required for creation") - if self.adls_container_name is UNSET: - errors.append("adls_container_name is required for creation") - if self.adls_container_qualified_name is UNSET: - errors.append("adls_container_qualified_name is required for creation") - if self.adls_account_name is UNSET: - errors.append("adls_account_name is required for creation") - if self.adls_account_qualified_name is UNSET: - errors.append("adls_account_qualified_name is required for creation") - if errors: - raise ValueError(f"ADLSObject validation failed: {errors}") - - def minimize(self) -> "ADLSObject": - """ - Return a minimal copy of this ADLSObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLSObject instance with only the minimum required fields. - """ - self.validate() - return ADLSObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSObject": - """ - Create a :class:`RelatedADLSObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSObject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedADLSObject(guid=self.guid) - return RelatedADLSObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -884,9 +812,6 @@ def _adls_object_to_nested(adls_object: ADLSObject) -> ADLSObjectNested: is_incomplete=adls_object.is_incomplete, provenance_type=adls_object.provenance_type, home_id=adls_object.home_id, - depth=adls_object.depth, - immediate_upstream=adls_object.immediate_upstream, - immediate_downstream=adls_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -918,6 +843,7 @@ def _adls_object_from_nested(nested: ADLSObjectNested) -> ADLSObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -926,9 +852,6 @@ def _adls_object_from_nested(nested: ADLSObjectNested) -> ADLSObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adls_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/ai.py b/pyatlan_v9/model/assets/ai.py index ffcc5ae5b..6dd8e2d2a 100644 --- a/pyatlan_v9/model/assets/ai.py +++ b/pyatlan_v9/model/assets/ai.py @@ -26,7 +26,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .ai_related import RelatedAI from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, 
RelatedApplicationField @@ -99,6 +98,8 @@ class AI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AI" + ethical_ai_privacy_config: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="ethicalAIPrivacyConfig" ) @@ -229,66 +230,6 @@ class AI(Asset): def __post_init__(self) -> None: self.type_name = "AI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AI validation failed: {errors}") - - def minimize(self) -> "AI": - """ - Return a minimal copy of this AI with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AI instance with only the minimum required fields. 
- """ - self.validate() - return AI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAI": - """ - Create a :class:`RelatedAI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAI reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAI(guid=self.guid) - return RelatedAI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -590,9 +531,6 @@ def _ai_to_nested(ai: AI) -> AINested: is_incomplete=ai.is_incomplete, provenance_type=ai.provenance_type, home_id=ai.home_id, - depth=ai.depth, - immediate_upstream=ai.immediate_upstream, - immediate_downstream=ai.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -622,6 +560,7 @@ def _ai_from_nested(nested: AINested) -> AI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -630,9 +569,6 @@ def _ai_from_nested(nested: AINested) -> AI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/ai_application.py b/pyatlan_v9/model/assets/ai_application.py index 5a6ecc94c..77b6e87de 100644 --- a/pyatlan_v9/model/assets/ai_application.py +++ 
b/pyatlan_v9/model/assets/ai_application.py @@ -29,7 +29,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .ai_related import RelatedAIApplication, RelatedAIModel +from .ai_related import RelatedAIModel from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -105,6 +105,8 @@ class AIApplication(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIApplication" + ai_application_version: Union[str, None, UnsetType] = UNSET """Version of the AI application""" @@ -244,66 +246,6 @@ class AIApplication(Asset): def __post_init__(self) -> None: self.type_name = "AIApplication" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIApplication instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AIApplication validation failed: {errors}") - - def minimize(self) -> "AIApplication": - """ - Return a minimal copy of this AIApplication with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIApplication with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AIApplication instance with only the minimum required fields. - """ - self.validate() - return AIApplication(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIApplication": - """ - Create a :class:`RelatedAIApplication` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIApplication reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAIApplication(guid=self.guid) - return RelatedAIApplication(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -664,9 +606,6 @@ def _ai_application_to_nested(ai_application: AIApplication) -> AIApplicationNes is_incomplete=ai_application.is_incomplete, provenance_type=ai_application.provenance_type, home_id=ai_application.home_id, - depth=ai_application.depth, - immediate_upstream=ai_application.immediate_upstream, - immediate_downstream=ai_application.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -700,6 +639,7 @@ def _ai_application_from_nested(nested: AIApplicationNested) -> AIApplication: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -708,9 +648,6 @@ def _ai_application_from_nested(nested: AIApplicationNested) -> AIApplication: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_application_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/ai_model.py b/pyatlan_v9/model/assets/ai_model.py index f8b16af0d..d01b3d0cd 100644 --- a/pyatlan_v9/model/assets/ai_model.py +++ b/pyatlan_v9/model/assets/ai_model.py @@ -30,7 +30,7 @@ from pyatlan_v9.model.transform import get_type, register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .ai_related import RelatedAIApplication, RelatedAIModel, RelatedAIModelVersion +from .ai_related import RelatedAIApplication, RelatedAIModelVersion from .airflow_related import RelatedAirflowTask from 
.anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -108,6 +108,8 @@ class AIModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIModel" + ai_model_datasets_dsl: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="aiModelDatasetsDSL" ) @@ -255,66 +257,6 @@ class AIModel(Asset): def __post_init__(self) -> None: self.type_name = "AIModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AIModel validation failed: {errors}") - - def minimize(self) -> "AIModel": - """ - Return a minimal copy of this AIModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new AIModel instance with only the minimum required fields. - """ - self.validate() - return AIModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIModel": - """ - Create a :class:`RelatedAIModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAIModel(guid=self.guid) - return RelatedAIModel(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -739,9 +681,6 @@ def _ai_model_to_nested(ai_model: AIModel) -> AIModelNested: is_incomplete=ai_model.is_incomplete, provenance_type=ai_model.provenance_type, home_id=ai_model.home_id, - depth=ai_model.depth, - immediate_upstream=ai_model.immediate_upstream, - immediate_downstream=ai_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -771,6 +710,7 @@ def _ai_model_from_nested(nested: AIModelNested) -> AIModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -779,9 +719,6 @@ def _ai_model_from_nested(nested: AIModelNested) -> AIModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/ai_model_version.py b/pyatlan_v9/model/assets/ai_model_version.py index 2f3040e2b..b95916b15 100644 --- a/pyatlan_v9/model/assets/ai_model_version.py 
+++ b/pyatlan_v9/model/assets/ai_model_version.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .ai_related import RelatedAIModel, RelatedAIModelVersion +from .ai_related import RelatedAIModel from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -101,6 +101,8 @@ class AIModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIModelVersion" + ethical_ai_privacy_config: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="ethicalAIPrivacyConfig" ) @@ -240,72 +242,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.ai_model is UNSET: - errors.append("ai_model is required for creation") - if errors: - raise ValueError(f"AIModelVersion validation failed: {errors}") - - def minimize(self) -> "AIModelVersion": - """ - Return a minimal copy of this AIModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AIModelVersion instance with only the minimum required fields. - """ - self.validate() - return AIModelVersion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIModelVersion": - """ - Create a :class:`RelatedAIModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIModelVersion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAIModelVersion(guid=self.guid) - return RelatedAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -623,9 +559,6 @@ def _ai_model_version_to_nested( is_incomplete=ai_model_version.is_incomplete, provenance_type=ai_model_version.provenance_type, home_id=ai_model_version.home_id, - depth=ai_model_version.depth, - immediate_upstream=ai_model_version.immediate_upstream, - immediate_downstream=ai_model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -659,6 +592,7 @@ def _ai_model_version_from_nested(nested: AIModelVersionNested) -> AIModelVersio updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -667,9 +601,6 @@ def _ai_model_version_from_nested(nested: AIModelVersionNested) -> AIModelVersio is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/airflow.py b/pyatlan_v9/model/assets/airflow.py index aec62556e..8c855e028 100644 --- a/pyatlan_v9/model/assets/airflow.py +++ b/pyatlan_v9/model/assets/airflow.py @@ -25,7 +25,7 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .airflow_related import RelatedAirflow, RelatedAirflowTask +from .airflow_related import 
RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -99,6 +99,8 @@ class Airflow(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Airflow" + airflow_tags: Union[List[str], None, UnsetType] = UNSET """Tags assigned to the asset in Airflow.""" @@ -221,66 +223,6 @@ class Airflow(Asset): def __post_init__(self) -> None: self.type_name = "Airflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Airflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Airflow validation failed: {errors}") - - def minimize(self) -> "Airflow": - """ - Return a minimal copy of this Airflow with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Airflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Airflow instance with only the minimum required fields. - """ - self.validate() - return Airflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflow": - """ - Create a :class:`RelatedAirflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAirflow(guid=self.guid) - return RelatedAirflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -571,9 +513,6 @@ def _airflow_to_nested(airflow: Airflow) -> AirflowNested: is_incomplete=airflow.is_incomplete, provenance_type=airflow.provenance_type, home_id=airflow.home_id, - depth=airflow.depth, - immediate_upstream=airflow.immediate_upstream, - immediate_downstream=airflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -603,6 +542,7 @@ def _airflow_from_nested(nested: AirflowNested) -> Airflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -611,9 +551,6 @@ def _airflow_from_nested(nested: AirflowNested) -> Airflow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, 
- immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_airflow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/airflow_dag.py b/pyatlan_v9/model/assets/airflow_dag.py index f31cc41c8..4bdc81f58 100644 --- a/pyatlan_v9/model/assets/airflow_dag.py +++ b/pyatlan_v9/model/assets/airflow_dag.py @@ -26,7 +26,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .airflow_related import RelatedAirflowDag, RelatedAirflowTask +from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -103,6 +103,8 @@ class AirflowDag(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AirflowDag" + airflow_dag_schedule: Union[str, None, UnsetType] = UNSET """Schedule for the DAG.""" @@ -234,66 +236,6 @@ class AirflowDag(Asset): def __post_init__(self) -> None: self.type_name = "AirflowDag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AirflowDag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AirflowDag validation failed: {errors}") - - def minimize(self) -> "AirflowDag": - """ - Return a minimal copy of this AirflowDag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AirflowDag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AirflowDag instance with only the minimum required fields. - """ - self.validate() - return AirflowDag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflowDag": - """ - Create a :class:`RelatedAirflowDag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflowDag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAirflowDag(guid=self.guid) - return RelatedAirflowDag(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -628,9 +570,6 @@ def _airflow_dag_to_nested(airflow_dag: AirflowDag) -> AirflowDagNested: is_incomplete=airflow_dag.is_incomplete, provenance_type=airflow_dag.provenance_type, home_id=airflow_dag.home_id, - depth=airflow_dag.depth, - immediate_upstream=airflow_dag.immediate_upstream, - immediate_downstream=airflow_dag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -662,6 +601,7 @@ def _airflow_dag_from_nested(nested: AirflowDagNested) -> AirflowDag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -670,9 +610,6 @@ def _airflow_dag_from_nested(nested: AirflowDagNested) -> AirflowDag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_airflow_dag_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/airflow_task.py b/pyatlan_v9/model/assets/airflow_task.py index 9d6e0e346..66447436e 100644 --- a/pyatlan_v9/model/assets/airflow_task.py +++ b/pyatlan_v9/model/assets/airflow_task.py @@ -118,6 +118,8 @@ class AirflowTask(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AirflowTask" + airflow_task_operator_class: Union[str, None, UnsetType] = UNSET """Class name for the operator this task uses.""" @@ -294,76 +296,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: 
ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AirflowTask instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.airflow_dag is UNSET: - errors.append("airflow_dag is required for creation") - if self.airflow_dag_name is UNSET: - errors.append("airflow_dag_name is required for creation") - if self.airflow_dag_qualified_name is UNSET: - errors.append("airflow_dag_qualified_name is required for creation") - if errors: - raise ValueError(f"AirflowTask validation failed: {errors}") - - def minimize(self) -> "AirflowTask": - """ - Return a minimal copy of this AirflowTask with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AirflowTask with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AirflowTask instance with only the minimum required fields. - """ - self.validate() - return AirflowTask(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflowTask": - """ - Create a :class:`RelatedAirflowTask` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflowTask reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAirflowTask(guid=self.guid) - return RelatedAirflowTask(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -768,9 +700,6 @@ def _airflow_task_to_nested(airflow_task: AirflowTask) -> AirflowTaskNested: is_incomplete=airflow_task.is_incomplete, provenance_type=airflow_task.provenance_type, home_id=airflow_task.home_id, - depth=airflow_task.depth, - immediate_upstream=airflow_task.immediate_upstream, - immediate_downstream=airflow_task.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -802,6 +731,7 @@ def _airflow_task_from_nested(nested: AirflowTaskNested) -> AirflowTask: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -810,9 +740,6 @@ def _airflow_task_from_nested(nested: AirflowTaskNested) -> AirflowTask: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_airflow_task_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan.py b/pyatlan_v9/model/assets/anaplan.py index 8731ecc55..33edc4e2d 100644 --- a/pyatlan_v9/model/assets/anaplan.py +++ b/pyatlan_v9/model/assets/anaplan.py @@ -26,7 +26,6 @@ from pyatlan_v9.model.transform import register_asset from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplan from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -98,6 +97,8 @@ class Anaplan(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Anaplan" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -214,66 +215,6 @@ class Anaplan(Asset): def __post_init__(self) -> None: self.type_name = "Anaplan" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Anaplan instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Anaplan validation failed: {errors}") - - def minimize(self) -> "Anaplan": - """ - Return a minimal copy of this Anaplan with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Anaplan with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Anaplan instance with only the minimum required fields. - """ - self.validate() - return Anaplan(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplan": - """ - Create a :class:`RelatedAnaplan` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplan reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplan(guid=self.guid) - return RelatedAnaplan(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -555,9 +496,6 @@ def _anaplan_to_nested(anaplan: Anaplan) -> AnaplanNested: is_incomplete=anaplan.is_incomplete, provenance_type=anaplan.provenance_type, home_id=anaplan.home_id, - depth=anaplan.depth, - immediate_upstream=anaplan.immediate_upstream, - immediate_downstream=anaplan.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -587,6 +525,7 @@ def _anaplan_from_nested(nested: AnaplanNested) -> Anaplan: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -595,9 +534,6 @@ def _anaplan_from_nested(nested: AnaplanNested) -> Anaplan: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_app.py b/pyatlan_v9/model/assets/anaplan_app.py index 10a5e14b6..f57a5d90b 100644 --- a/pyatlan_v9/model/assets/anaplan_app.py +++ b/pyatlan_v9/model/assets/anaplan_app.py @@ -27,7 +27,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanPage +from .anaplan_related import RelatedAnaplanPage from .anomalo_related import RelatedAnomaloCheck 
from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -100,6 +100,8 @@ class AnaplanApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanApp" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -219,66 +221,6 @@ class AnaplanApp(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanApp validation failed: {errors}") - - def minimize(self) -> "AnaplanApp": - """ - Return a minimal copy of this AnaplanApp with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanApp instance with only the minimum required fields. - """ - self.validate() - return AnaplanApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanApp": - """ - Create a :class:`RelatedAnaplanApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanApp reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanApp(guid=self.guid) - return RelatedAnaplanApp(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "AnaplanApp": @@ -590,9 +532,6 @@ def _anaplan_app_to_nested(anaplan_app: AnaplanApp) -> AnaplanAppNested: is_incomplete=anaplan_app.is_incomplete, provenance_type=anaplan_app.provenance_type, home_id=anaplan_app.home_id, - depth=anaplan_app.depth, - immediate_upstream=anaplan_app.immediate_upstream, - immediate_downstream=anaplan_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +563,7 @@ def _anaplan_app_from_nested(nested: AnaplanAppNested) -> AnaplanApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +572,6 @@ def _anaplan_app_from_nested(nested: AnaplanAppNested) -> AnaplanApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_app_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_dimension.py b/pyatlan_v9/model/assets/anaplan_dimension.py index 338ca7908..5a921af76 100644 --- a/pyatlan_v9/model/assets/anaplan_dimension.py +++ b/pyatlan_v9/model/assets/anaplan_dimension.py @@ -30,7 +30,6 @@ from .airflow_related import RelatedAirflowTask from .anaplan_related import ( - RelatedAnaplanDimension, RelatedAnaplanLineItem, RelatedAnaplanModel, RelatedAnaplanView, @@ -111,6 +110,8 @@ class AnaplanDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanDimension" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -250,82 +251,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanDimension validation failed: {errors}") - - def minimize(self) -> "AnaplanDimension": - """ - Return a minimal copy of this AnaplanDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanDimension instance with only the minimum required fields. - """ - self.validate() - return AnaplanDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanDimension": - """ - Create a :class:`RelatedAnaplanDimension` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanDimension reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanDimension(guid=self.guid) - return RelatedAnaplanDimension(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -686,9 +611,6 @@ def _anaplan_dimension_to_nested( is_incomplete=anaplan_dimension.is_incomplete, provenance_type=anaplan_dimension.provenance_type, home_id=anaplan_dimension.home_id, - depth=anaplan_dimension.depth, - immediate_upstream=anaplan_dimension.immediate_upstream, - immediate_downstream=anaplan_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -722,6 +644,7 @@ def _anaplan_dimension_from_nested(nested: AnaplanDimensionNested) -> AnaplanDim updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -730,9 +653,6 @@ def _anaplan_dimension_from_nested(nested: AnaplanDimensionNested) -> AnaplanDim is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_line_item.py b/pyatlan_v9/model/assets/anaplan_line_item.py index 3bf045dcd..ebc569669 100644 --- a/pyatlan_v9/model/assets/anaplan_line_item.py +++ b/pyatlan_v9/model/assets/anaplan_line_item.py @@ -31,7 +31,6 @@ from .airflow_related import RelatedAirflowTask from .anaplan_related import ( RelatedAnaplanDimension, - 
RelatedAnaplanLineItem, RelatedAnaplanList, RelatedAnaplanModule, ) @@ -110,6 +109,8 @@ class AnaplanLineItem(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanLineItem" + anaplan_line_item_formula: Union[str, None, UnsetType] = UNSET """Formula of the AnaplanLineItem from the source system.""" @@ -246,86 +247,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanLineItem instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_module is UNSET: - errors.append("anaplan_module is required for creation") - if self.anaplan_module_name is UNSET: - errors.append("anaplan_module_name is required for creation") - if self.anaplan_module_qualified_name is UNSET: - errors.append("anaplan_module_qualified_name is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanLineItem validation failed: {errors}") - - def minimize(self) -> "AnaplanLineItem": - """ - Return a minimal copy of this AnaplanLineItem with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanLineItem with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanLineItem instance with only the minimum required fields. 
- """ - self.validate() - return AnaplanLineItem(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanLineItem": - """ - Create a :class:`RelatedAnaplanLineItem` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanLineItem reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanLineItem(guid=self.guid) - return RelatedAnaplanLineItem(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -687,9 +608,6 @@ def _anaplan_line_item_to_nested( is_incomplete=anaplan_line_item.is_incomplete, provenance_type=anaplan_line_item.provenance_type, home_id=anaplan_line_item.home_id, - depth=anaplan_line_item.depth, - immediate_upstream=anaplan_line_item.immediate_upstream, - immediate_downstream=anaplan_line_item.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -723,6 +641,7 @@ def _anaplan_line_item_from_nested(nested: AnaplanLineItemNested) -> AnaplanLine updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -731,9 +650,6 @@ def _anaplan_line_item_from_nested(nested: AnaplanLineItemNested) -> AnaplanLine is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_line_item_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_list.py b/pyatlan_v9/model/assets/anaplan_list.py index d35212deb..c14e5e42f 100644 --- 
a/pyatlan_v9/model/assets/anaplan_list.py +++ b/pyatlan_v9/model/assets/anaplan_list.py @@ -29,11 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import ( - RelatedAnaplanLineItem, - RelatedAnaplanList, - RelatedAnaplanModel, -) +from .anaplan_related import RelatedAnaplanLineItem, RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -108,6 +104,8 @@ class AnaplanList(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanList" + anaplan_list_item_count: Union[int, None, UnsetType] = UNSET """Item Count of the AnaplanList from the source system.""" @@ -241,82 +239,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanList instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanList validation failed: {errors}") - - def minimize(self) -> "AnaplanList": - """ - Return a minimal copy of this AnaplanList with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanList with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanList instance with only the minimum required fields. - """ - self.validate() - return AnaplanList(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanList": - """ - Create a :class:`RelatedAnaplanList` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanList reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanList(guid=self.guid) - return RelatedAnaplanList(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -660,9 +582,6 @@ def _anaplan_list_to_nested(anaplan_list: AnaplanList) -> AnaplanListNested: is_incomplete=anaplan_list.is_incomplete, provenance_type=anaplan_list.provenance_type, home_id=anaplan_list.home_id, - depth=anaplan_list.depth, - immediate_upstream=anaplan_list.immediate_upstream, - immediate_downstream=anaplan_list.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -694,6 +613,7 @@ def _anaplan_list_from_nested(nested: AnaplanListNested) -> AnaplanList: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -702,9 +622,6 @@ def _anaplan_list_from_nested(nested: AnaplanListNested) -> AnaplanList: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_list_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_model.py b/pyatlan_v9/model/assets/anaplan_model.py index 5e5bc9204..8d28bdd1c 100644 --- a/pyatlan_v9/model/assets/anaplan_model.py +++ b/pyatlan_v9/model/assets/anaplan_model.py @@ -32,7 +32,6 @@ from .anaplan_related import ( RelatedAnaplanDimension, RelatedAnaplanList, - RelatedAnaplanModel, RelatedAnaplanModule, RelatedAnaplanPage, RelatedAnaplanWorkspace, @@ -113,6 +112,8 @@ class AnaplanModel(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanModel" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -250,78 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_workspace is UNSET: - errors.append("anaplan_workspace is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanModel validation failed: {errors}") - - def minimize(self) -> "AnaplanModel": - """ - Return a minimal copy of this AnaplanModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanModel instance with only the minimum required fields. - """ - self.validate() - return AnaplanModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanModel": - """ - Create a :class:`RelatedAnaplanModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanModel(guid=self.guid) - return RelatedAnaplanModel(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -667,9 +596,6 @@ def _anaplan_model_to_nested(anaplan_model: AnaplanModel) -> AnaplanModelNested: is_incomplete=anaplan_model.is_incomplete, provenance_type=anaplan_model.provenance_type, home_id=anaplan_model.home_id, - depth=anaplan_model.depth, - immediate_upstream=anaplan_model.immediate_upstream, - immediate_downstream=anaplan_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -703,6 +629,7 @@ def _anaplan_model_from_nested(nested: AnaplanModelNested) -> AnaplanModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -711,9 +638,6 @@ def _anaplan_model_from_nested(nested: AnaplanModelNested) -> AnaplanModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_module.py b/pyatlan_v9/model/assets/anaplan_module.py index eab9099e4..b5017934e 100644 --- a/pyatlan_v9/model/assets/anaplan_module.py +++ b/pyatlan_v9/model/assets/anaplan_module.py @@ -32,7 +32,6 @@ from .anaplan_related import ( RelatedAnaplanLineItem, RelatedAnaplanModel, - RelatedAnaplanModule, RelatedAnaplanView, ) from .anomalo_related import RelatedAnomaloCheck @@ -109,6 +108,8 @@ class AnaplanModule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] 
= "AnaplanModule" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -242,82 +243,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if 
self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanModule validation failed: {errors}") - - def minimize(self) -> "AnaplanModule": - """ - Return a minimal copy of this AnaplanModule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanModule instance with only the minimum required fields. - """ - self.validate() - return AnaplanModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanModule": - """ - Create a :class:`RelatedAnaplanModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanModule reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanModule(guid=self.guid) - return RelatedAnaplanModule(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -655,9 +580,6 @@ def _anaplan_module_to_nested(anaplan_module: AnaplanModule) -> AnaplanModuleNes is_incomplete=anaplan_module.is_incomplete, provenance_type=anaplan_module.provenance_type, home_id=anaplan_module.home_id, - depth=anaplan_module.depth, - immediate_upstream=anaplan_module.immediate_upstream, - immediate_downstream=anaplan_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -691,6 +613,7 @@ def _anaplan_module_from_nested(nested: AnaplanModuleNested) -> AnaplanModule: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -699,9 +622,6 @@ def _anaplan_module_from_nested(nested: AnaplanModuleNested) -> AnaplanModule: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_module_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_page.py b/pyatlan_v9/model/assets/anaplan_page.py index d0518d31f..77ec83f09 100644 --- a/pyatlan_v9/model/assets/anaplan_page.py +++ b/pyatlan_v9/model/assets/anaplan_page.py @@ -29,7 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanModel, RelatedAnaplanPage +from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from 
.app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -106,6 +106,8 @@ class AnaplanPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanPage" + anaplan_app_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanApp asset that contains this asset.""" @@ -243,74 +245,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_app is UNSET: - errors.append("anaplan_app is required for creation") - if self.anaplan_app_qualified_name is UNSET: - errors.append("anaplan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AnaplanPage validation failed: {errors}") - - def minimize(self) -> "AnaplanPage": - """ - Return a minimal copy of this AnaplanPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanPage instance with only the minimum required fields. - """ - self.validate() - return AnaplanPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanPage": - """ - Create a :class:`RelatedAnaplanPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanPage(guid=self.guid) - return RelatedAnaplanPage(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -657,9 +591,6 @@ def _anaplan_page_to_nested(anaplan_page: AnaplanPage) -> AnaplanPageNested: is_incomplete=anaplan_page.is_incomplete, provenance_type=anaplan_page.provenance_type, home_id=anaplan_page.home_id, - depth=anaplan_page.depth, - immediate_upstream=anaplan_page.immediate_upstream, - immediate_downstream=anaplan_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -691,6 +622,7 @@ def _anaplan_page_from_nested(nested: AnaplanPageNested) -> AnaplanPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -699,9 +631,6 @@ def _anaplan_page_from_nested(nested: AnaplanPageNested) -> AnaplanPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_page_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_system_dimension.py b/pyatlan_v9/model/assets/anaplan_system_dimension.py index 5c7a44bb0..5e3529250 100644 --- a/pyatlan_v9/model/assets/anaplan_system_dimension.py +++ b/pyatlan_v9/model/assets/anaplan_system_dimension.py @@ -27,7 +27,6 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanSystemDimension from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ 
-99,6 +98,8 @@ class AnaplanSystemDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanSystemDimension" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -215,68 +216,6 @@ class AnaplanSystemDimension(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanSystemDimension" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanSystemDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanSystemDimension validation failed: {errors}") - - def minimize(self) -> "AnaplanSystemDimension": - """ - Return a minimal copy of this AnaplanSystemDimension with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanSystemDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanSystemDimension instance with only the minimum required fields. - """ - self.validate() - return AnaplanSystemDimension( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAnaplanSystemDimension": - """ - Create a :class:`RelatedAnaplanSystemDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanSystemDimension reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanSystemDimension(guid=self.guid) - return RelatedAnaplanSystemDimension(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -600,9 +539,6 @@ def _anaplan_system_dimension_to_nested( is_incomplete=anaplan_system_dimension.is_incomplete, provenance_type=anaplan_system_dimension.provenance_type, home_id=anaplan_system_dimension.home_id, - depth=anaplan_system_dimension.depth, - immediate_upstream=anaplan_system_dimension.immediate_upstream, - immediate_downstream=anaplan_system_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -638,6 +574,7 @@ def _anaplan_system_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -646,9 +583,6 @@ def _anaplan_system_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, 
- immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_system_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_view.py b/pyatlan_v9/model/assets/anaplan_view.py index 1b7b5f380..1dca224e2 100644 --- a/pyatlan_v9/model/assets/anaplan_view.py +++ b/pyatlan_v9/model/assets/anaplan_view.py @@ -29,11 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import ( - RelatedAnaplanDimension, - RelatedAnaplanModule, - RelatedAnaplanView, -) +from .anaplan_related import RelatedAnaplanDimension, RelatedAnaplanModule from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -109,6 +105,8 @@ class AnaplanView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanView" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -251,86 +249,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_module is UNSET: - errors.append("anaplan_module is required for creation") - if self.anaplan_module_name is UNSET: - errors.append("anaplan_module_name is required for creation") - if self.anaplan_module_qualified_name is UNSET: - errors.append("anaplan_module_qualified_name is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanView validation failed: {errors}") - - def minimize(self) -> "AnaplanView": - """ - Return a minimal copy of this AnaplanView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanView instance with only the minimum required fields. 
- """ - self.validate() - return AnaplanView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanView": - """ - Create a :class:`RelatedAnaplanView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanView(guid=self.guid) - return RelatedAnaplanView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -687,9 +605,6 @@ def _anaplan_view_to_nested(anaplan_view: AnaplanView) -> AnaplanViewNested: is_incomplete=anaplan_view.is_incomplete, provenance_type=anaplan_view.provenance_type, home_id=anaplan_view.home_id, - depth=anaplan_view.depth, - immediate_upstream=anaplan_view.immediate_upstream, - immediate_downstream=anaplan_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -721,6 +636,7 @@ def _anaplan_view_from_nested(nested: AnaplanViewNested) -> AnaplanView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -729,9 +645,6 @@ def _anaplan_view_from_nested(nested: AnaplanViewNested) -> AnaplanView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anaplan_workspace.py b/pyatlan_v9/model/assets/anaplan_workspace.py index 0456d0364..f5aa1b0ab 100644 --- 
a/pyatlan_v9/model/assets/anaplan_workspace.py +++ b/pyatlan_v9/model/assets/anaplan_workspace.py @@ -27,7 +27,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanModel, RelatedAnaplanWorkspace +from .anaplan_related import RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -102,6 +102,8 @@ class AnaplanWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanWorkspace" + anaplan_workspace_current_size: Union[int, None, UnsetType] = UNSET """Current size of the AnaplanWorkspace from the source system, estimated in MB.""" @@ -227,66 +229,6 @@ class AnaplanWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanWorkspace validation failed: {errors}") - - def minimize(self) -> "AnaplanWorkspace": - """ - Return a minimal copy of this AnaplanWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanWorkspace instance with only the minimum required fields. - """ - self.validate() - return AnaplanWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanWorkspace": - """ - Create a :class:`RelatedAnaplanWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanWorkspace(guid=self.guid) - return RelatedAnaplanWorkspace(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -622,9 +564,6 @@ def _anaplan_workspace_to_nested( is_incomplete=anaplan_workspace.is_incomplete, provenance_type=anaplan_workspace.provenance_type, home_id=anaplan_workspace.home_id, - depth=anaplan_workspace.depth, - immediate_upstream=anaplan_workspace.immediate_upstream, - immediate_downstream=anaplan_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -658,6 +597,7 @@ def _anaplan_workspace_from_nested(nested: AnaplanWorkspaceNested) -> AnaplanWor updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -666,9 +606,6 @@ def _anaplan_workspace_from_nested(nested: AnaplanWorkspaceNested) -> AnaplanWor is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anomalo.py b/pyatlan_v9/model/assets/anomalo.py index d1ab729be..ed846719e 100644 --- a/pyatlan_v9/model/assets/anomalo.py +++ b/pyatlan_v9/model/assets/anomalo.py @@ -26,7 +26,7 @@ from pyatlan_v9.model.transform import register_asset from .airflow_related import RelatedAirflowTask -from .anomalo_related import RelatedAnomalo, RelatedAnomaloCheck +from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -91,6 +91,8 @@ class Anomalo(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Anomalo" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -189,66 +191,6 @@ class Anomalo(Asset): def __post_init__(self) -> None: self.type_name = "Anomalo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Anomalo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Anomalo validation failed: {errors}") - - def minimize(self) -> "Anomalo": - """ - Return a minimal copy of this Anomalo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Anomalo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Anomalo instance with only the minimum required fields. 
- """ - self.validate() - return Anomalo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnomalo": - """ - Create a :class:`RelatedAnomalo` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnomalo reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnomalo(guid=self.guid) - return RelatedAnomalo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -500,9 +442,6 @@ def _anomalo_to_nested(anomalo: Anomalo) -> AnomaloNested: is_incomplete=anomalo.is_incomplete, provenance_type=anomalo.provenance_type, home_id=anomalo.home_id, - depth=anomalo.depth, - immediate_upstream=anomalo.immediate_upstream, - immediate_downstream=anomalo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -532,6 +471,7 @@ def _anomalo_from_nested(nested: AnomaloNested) -> Anomalo: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -540,9 +480,6 @@ def _anomalo_from_nested(nested: AnomaloNested) -> Anomalo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anomalo_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/anomalo_check.py b/pyatlan_v9/model/assets/anomalo_check.py 
index f7bf6c28a..5f0a6ad97 100644 --- a/pyatlan_v9/model/assets/anomalo_check.py +++ b/pyatlan_v9/model/assets/anomalo_check.py @@ -104,6 +104,8 @@ class AnomaloCheck(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnomaloCheck" + anomalo_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check""" @@ -238,66 +240,6 @@ class AnomaloCheck(Asset): def __post_init__(self) -> None: self.type_name = "AnomaloCheck" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnomaloCheck instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnomaloCheck validation failed: {errors}") - - def minimize(self) -> "AnomaloCheck": - """ - Return a minimal copy of this AnomaloCheck with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnomaloCheck with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnomaloCheck instance with only the minimum required fields. - """ - self.validate() - return AnomaloCheck(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnomaloCheck": - """ - Create a :class:`RelatedAnomaloCheck` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnomaloCheck reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnomaloCheck(guid=self.guid) - return RelatedAnomaloCheck(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -624,9 +566,6 @@ def _anomalo_check_to_nested(anomalo_check: AnomaloCheck) -> AnomaloCheckNested: is_incomplete=anomalo_check.is_incomplete, provenance_type=anomalo_check.provenance_type, home_id=anomalo_check.home_id, - depth=anomalo_check.depth, - immediate_upstream=anomalo_check.immediate_upstream, - immediate_downstream=anomalo_check.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -660,6 +599,7 @@ def _anomalo_check_from_nested(nested: AnomaloCheckNested) -> AnomaloCheck: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -668,9 +608,6 @@ def _anomalo_check_from_nested(nested: AnomaloCheckNested) -> 
AnomaloCheck: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anomalo_check_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api.py b/pyatlan_v9/model/assets/api.py index 24a0d9e01..17c8e76c1 100644 --- a/pyatlan_v9/model/assets/api.py +++ b/pyatlan_v9/model/assets/api.py @@ -27,7 +27,6 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPI from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -99,6 +98,8 @@ class API(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "API" + api_spec_type: Union[str, None, UnsetType] = UNSET """Type of API, for example: OpenAPI, GraphQL, etc.""" @@ -218,66 +219,6 @@ class API(Asset): def __post_init__(self) -> None: self.type_name = "API" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this API instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"API validation failed: {errors}") - - def minimize(self) -> "API": - """ - Return a minimal copy of this API with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new API with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new API instance with only the minimum required fields. - """ - self.validate() - return API(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPI": - """ - Create a :class:`RelatedAPI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPI reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPI(guid=self.guid) - return RelatedAPI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -560,9 +501,6 @@ def _api_to_nested(api: API) -> APINested: is_incomplete=api.is_incomplete, provenance_type=api.provenance_type, home_id=api.home_id, - depth=api.depth, - immediate_upstream=api.immediate_upstream, - immediate_downstream=api.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +530,7 @@ def _api_from_nested(nested: APINested) -> API: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +539,6 @@ def _api_from_nested(nested: APINested) -> API: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api_field.py b/pyatlan_v9/model/assets/api_field.py index 4f885b88c..ed237bb46 100644 --- a/pyatlan_v9/model/assets/api_field.py +++ b/pyatlan_v9/model/assets/api_field.py @@ -29,7 +29,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIObject, RelatedAPIQuery +from .api_related import RelatedAPIObject, RelatedAPIQuery from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -106,6 +106,8 @@ 
class APIField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIField" + api_field_type: Union[str, None, UnsetType] = UNSET """Type of APIField, as free text (e.g. STRING, NUMBER etc).""" @@ -246,74 +248,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.api_object is UNSET: - errors.append("api_object is required for creation") - if self.api_object_qualified_name is UNSET: - errors.append("api_object_qualified_name is required for creation") - if errors: - raise ValueError(f"APIField validation failed: {errors}") - - def minimize(self) -> "APIField": - """ - Return a minimal copy of this APIField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIField instance with only the minimum required fields. - """ - self.validate() - return APIField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIField": - """ - Create a :class:`RelatedAPIField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPIField(guid=self.guid) - return RelatedAPIField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -746,9 +680,6 @@ def _api_field_to_nested(api_field: APIField) -> APIFieldNested: is_incomplete=api_field.is_incomplete, provenance_type=api_field.provenance_type, home_id=api_field.home_id, - depth=api_field.depth, - immediate_upstream=api_field.immediate_upstream, - immediate_downstream=api_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -780,6 +711,7 @@ def _api_field_from_nested(nested: APIFieldNested) -> APIField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -788,9 +720,6 @@ def _api_field_from_nested(nested: APIFieldNested) -> APIField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api_object.py b/pyatlan_v9/model/assets/api_object.py index 5caeaccba..1cf55b81c 100644 --- a/pyatlan_v9/model/assets/api_object.py +++ b/pyatlan_v9/model/assets/api_object.py @@ -28,7 +28,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIObject +from .api_related import RelatedAPIField from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -102,6 +102,8 @@ class APIObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] 
= None + type_name: Union[str, UnsetType] = "APIObject" + api_field_count: Union[int, None, UnsetType] = UNSET """Count of the APIField of this object.""" @@ -227,66 +229,6 @@ class APIObject(Asset): def __post_init__(self) -> None: self.type_name = "APIObject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APIObject validation failed: {errors}") - - def minimize(self) -> "APIObject": - """ - Return a minimal copy of this APIObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIObject instance with only the minimum required fields. 
- """ - self.validate() - return APIObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIObject": - """ - Create a :class:`RelatedAPIObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIObject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAPIObject(guid=self.guid) - return RelatedAPIObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -615,9 +557,6 @@ def _api_object_to_nested(api_object: APIObject) -> APIObjectNested: is_incomplete=api_object.is_incomplete, provenance_type=api_object.provenance_type, home_id=api_object.home_id, - depth=api_object.depth, - immediate_upstream=api_object.immediate_upstream, - immediate_downstream=api_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -649,6 +588,7 @@ def _api_object_from_nested(nested: APIObjectNested) -> APIObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -657,9 +597,6 @@ def _api_object_from_nested(nested: APIObjectNested) -> APIObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api_path.py b/pyatlan_v9/model/assets/api_path.py index fbc8c948d..35f741433 100644 --- a/pyatlan_v9/model/assets/api_path.py +++ b/pyatlan_v9/model/assets/api_path.py @@ -30,7 +30,7 
@@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIPath, RelatedAPISpec +from .api_related import RelatedAPISpec from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -109,6 +109,8 @@ class APIPath(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIPath" + api_path_summary: Union[str, None, UnsetType] = UNSET """Descriptive summary intended to apply to all operations in this path.""" @@ -257,76 +259,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIPath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.api_spec is UNSET: - errors.append("api_spec is required for creation") - if self.api_spec_name is UNSET: - errors.append("api_spec_name is required for creation") - if self.api_spec_qualified_name is UNSET: - errors.append("api_spec_qualified_name is required for creation") - if errors: - raise ValueError(f"APIPath validation failed: {errors}") - - def minimize(self) -> "APIPath": - """ - Return a minimal copy of this APIPath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIPath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIPath instance with only the minimum required fields. - """ - self.validate() - return APIPath(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIPath": - """ - Create a :class:`RelatedAPIPath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIPath reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPIPath(guid=self.guid) - return RelatedAPIPath(qualified_name=self.qualified_name) - @property def api_path_raw_u_r_i(self) -> Union[str, None, UnsetType]: return self.api_path_raw_uri @@ -705,9 +637,6 @@ def _api_path_to_nested(api_path: APIPath) -> APIPathNested: is_incomplete=api_path.is_incomplete, provenance_type=api_path.provenance_type, home_id=api_path.home_id, - depth=api_path.depth, - immediate_upstream=api_path.immediate_upstream, - immediate_downstream=api_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -737,6 +666,7 @@ def _api_path_from_nested(nested: APIPathNested) -> APIPath: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -745,9 +675,6 @@ def _api_path_from_nested(nested: APIPathNested) -> APIPath: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_path_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api_query.py b/pyatlan_v9/model/assets/api_query.py index ed2a58475..1a482b221 100644 --- a/pyatlan_v9/model/assets/api_query.py +++ b/pyatlan_v9/model/assets/api_query.py @@ -28,7 +28,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIQuery +from .api_related import RelatedAPIField from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -104,6 +104,8 @@ class APIQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIQuery" + api_input_field_count: Union[int, None, UnsetType] = UNSET """Count of the APIField of this query that are input to it.""" @@ -235,66 +237,6 @@ class APIQuery(Asset): def __post_init__(self) -> None: self.type_name = "APIQuery" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APIQuery validation failed: {errors}") - - def minimize(self) -> "APIQuery": - """ - Return a minimal copy of this APIQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIQuery instance with only the minimum required fields. 
- """ - self.validate() - return APIQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIQuery": - """ - Create a :class:`RelatedAPIQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIQuery reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAPIQuery(guid=self.guid) - return RelatedAPIQuery(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -660,9 +602,6 @@ def _api_query_to_nested(api_query: APIQuery) -> APIQueryNested: is_incomplete=api_query.is_incomplete, provenance_type=api_query.provenance_type, home_id=api_query.home_id, - depth=api_query.depth, - immediate_upstream=api_query.immediate_upstream, - immediate_downstream=api_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -694,6 +633,7 @@ def _api_query_from_nested(nested: APIQueryNested) -> APIQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -702,9 +642,6 @@ def _api_query_from_nested(nested: APIQueryNested) -> APIQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_query_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/api_spec.py b/pyatlan_v9/model/assets/api_spec.py index 716634298..6a30a4297 100644 --- a/pyatlan_v9/model/assets/api_spec.py +++ b/pyatlan_v9/model/assets/api_spec.py @@ -29,7 +29,7 @@ from 
.airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIPath, RelatedAPISpec +from .api_related import RelatedAPIPath from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -110,6 +110,8 @@ class APISpec(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APISpec" + api_spec_terms_of_service_url: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="apiSpecTermsOfServiceURL" ) @@ -262,66 +264,6 @@ class APISpec(Asset): def __post_init__(self) -> None: self.type_name = "APISpec" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APISpec instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APISpec validation failed: {errors}") - - def minimize(self) -> "APISpec": - """ - Return a minimal copy of this APISpec with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APISpec with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APISpec instance with only the minimum required fields. - """ - self.validate() - return APISpec(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPISpec": - """ - Create a :class:`RelatedAPISpec` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPISpec reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAPISpec(guid=self.guid) - return RelatedAPISpec(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "APISpec": @@ -684,9 +626,6 @@ def _api_spec_to_nested(api_spec: APISpec) -> APISpecNested: is_incomplete=api_spec.is_incomplete, provenance_type=api_spec.provenance_type, home_id=api_spec.home_id, - depth=api_spec.depth, - immediate_upstream=api_spec.immediate_upstream, - immediate_downstream=api_spec.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -716,6 +655,7 @@ def _api_spec_from_nested(nested: APISpecNested) -> APISpec: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -724,9 +664,6 @@ def _api_spec_from_nested(nested: APISpecNested) -> APISpec: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_api_spec_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/app.py b/pyatlan_v9/model/assets/app.py index 522847b4c..31cacf2b7 100644 --- a/pyatlan_v9/model/assets/app.py +++ b/pyatlan_v9/model/assets/app.py @@ -27,7 +27,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .app_related import RelatedApp, RelatedApplication, RelatedApplicationField +from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, Asset, @@ -91,6 +91,8 @@ class App(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "App" + app_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the application asset from the source system.""" @@ -189,66 +191,6 @@ class App(Asset): def __post_init__(self) -> None: self.type_name = "App" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this App instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"App validation failed: {errors}") - - def minimize(self) -> "App": - """ - Return a minimal copy of this App with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new App with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new App instance with only the minimum required fields. - """ - self.validate() - return App(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApp": - """ - Create a :class:`RelatedApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedApp(guid=self.guid) - return RelatedApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -496,9 +438,6 @@ def _app_to_nested(app: App) -> AppNested: is_incomplete=app.is_incomplete, provenance_type=app.provenance_type, home_id=app.home_id, - depth=app.depth, - immediate_upstream=app.immediate_upstream, - immediate_downstream=app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -528,6 +467,7 @@ def _app_from_nested(nested: AppNested) -> App: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -536,9 +476,6 @@ def _app_from_nested(nested: AppNested) -> App: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_app_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/app_workflow_run.py b/pyatlan_v9/model/assets/app_workflow_run.py index 0c6675afc..d5ccfac21 100644 --- a/pyatlan_v9/model/assets/app_workflow_run.py +++ b/pyatlan_v9/model/assets/app_workflow_run.py @@ -28,7 +28,6 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .app_workflow_run_related import RelatedAppWorkflowRun from .asset import ( _ASSET_REL_FIELDS, Asset, @@ -110,6 +109,8 @@ class AppWorkflowRun(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AppWorkflowRun" + app_workflow_run_label: Union[str, None, UnsetType] = UNSET """Root name for the workflow run.""" @@ -259,66 +260,6 @@ class AppWorkflowRun(Asset): def __post_init__(self) -> None: self.type_name = "AppWorkflowRun" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AppWorkflowRun instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AppWorkflowRun validation failed: {errors}") - - def minimize(self) -> "AppWorkflowRun": - """ - Return a minimal copy of this AppWorkflowRun with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AppWorkflowRun with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AppWorkflowRun instance with only the minimum required fields. 
- """ - self.validate() - return AppWorkflowRun(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAppWorkflowRun": - """ - Create a :class:`RelatedAppWorkflowRun` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAppWorkflowRun reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAppWorkflowRun(guid=self.guid) - return RelatedAppWorkflowRun(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -676,9 +617,6 @@ def _app_workflow_run_to_nested( is_incomplete=app_workflow_run.is_incomplete, provenance_type=app_workflow_run.provenance_type, home_id=app_workflow_run.home_id, - depth=app_workflow_run.depth, - immediate_upstream=app_workflow_run.immediate_upstream, - immediate_downstream=app_workflow_run.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -712,6 +650,7 @@ def _app_workflow_run_from_nested(nested: AppWorkflowRunNested) -> AppWorkflowRu updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -720,9 +659,6 @@ def _app_workflow_run_from_nested(nested: AppWorkflowRunNested) -> AppWorkflowRu is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_app_workflow_run_attrs(attrs), # Merged relationship 
attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/application.py b/pyatlan_v9/model/assets/application.py index 9293b5411..36ec05d37 100644 --- a/pyatlan_v9/model/assets/application.py +++ b/pyatlan_v9/model/assets/application.py @@ -95,6 +95,8 @@ class Application(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Application" + app_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the application asset from the source system.""" @@ -201,66 +203,6 @@ class Application(Asset): def __post_init__(self) -> None: self.type_name = "Application" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Application instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Application validation failed: {errors}") - - def minimize(self) -> "Application": - """ - Return a minimal copy of this Application with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Application with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Application instance with only the minimum required fields. - """ - self.validate() - return Application(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApplication": - """ - Create a :class:`RelatedApplication` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApplication reference to this asset. - """ - if self.guid is not UNSET: - return RelatedApplication(guid=self.guid) - return RelatedApplication(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -556,9 +498,6 @@ def _application_to_nested(application: Application) -> ApplicationNested: is_incomplete=application.is_incomplete, provenance_type=application.provenance_type, home_id=application.home_id, - depth=application.depth, - immediate_upstream=application.immediate_upstream, - immediate_downstream=application.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -590,6 +529,7 @@ def _application_from_nested(nested: ApplicationNested) -> Application: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -598,9 +538,6 @@ def _application_from_nested(nested: ApplicationNested) -> Application: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_application_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/application_field.py b/pyatlan_v9/model/assets/application_field.py index b79a45f7c..32163339b 100644 --- a/pyatlan_v9/model/assets/application_field.py +++ b/pyatlan_v9/model/assets/application_field.py @@ -97,6 +97,8 @@ class ApplicationField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ApplicationField" + application_parent_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent Application asset that contains this ApplicationField asset.""" @@ -210,72 +212,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ApplicationField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.application_parent is UNSET: - errors.append("application_parent is required for creation") - if errors: - raise ValueError(f"ApplicationField validation failed: {errors}") - - def minimize(self) -> "ApplicationField": - """ - Return a minimal copy of this ApplicationField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ApplicationField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ApplicationField instance with only the minimum required fields. - """ - self.validate() - return ApplicationField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApplicationField": - """ - Create a :class:`RelatedApplicationField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApplicationField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedApplicationField(guid=self.guid) - return RelatedApplicationField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -601,9 +537,6 @@ def _application_field_to_nested( is_incomplete=application_field.is_incomplete, provenance_type=application_field.provenance_type, home_id=application_field.home_id, - depth=application_field.depth, - immediate_upstream=application_field.immediate_upstream, - immediate_downstream=application_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -637,6 +570,7 @@ def _application_field_from_nested(nested: ApplicationFieldNested) -> Applicatio updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -645,9 +579,6 @@ def _application_field_from_nested(nested: ApplicationFieldNested) -> Applicatio is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_application_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/asset.py b/pyatlan_v9/model/assets/asset.py index aeb1fbe1b..895f60ef0 100644 --- a/pyatlan_v9/model/assets/asset.py +++ b/pyatlan_v9/model/assets/asset.py @@ -30,7 +30,6 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedAsset from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -77,6 +76,7 @@ class Asset(Referenceable): 
ANNOUNCEMENT_TYPE: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_AT: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_BY: ClassVar[Any] = None + ASSET_ANNOUNCEMENT_EXPIRED_AT: ClassVar[Any] = None OWNER_USERS: ClassVar[Any] = None OWNER_GROUPS: ClassVar[Any] = None ADMIN_USERS: ClassVar[Any] = None @@ -320,6 +320,9 @@ class Asset(Referenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -994,66 +997,6 @@ def __post_init__(self) -> None: if self.type_name is UNSET: self.type_name = "Asset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Asset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Asset validation failed: {errors}") - - def minimize(self) -> "Asset": - """ - Return a minimal copy of this Asset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Asset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Asset instance with only the minimum required fields. - """ - self.validate() - return Asset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAsset": - """ - Create a :class:`RelatedAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAsset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAsset(guid=self.guid) - return RelatedAsset(qualified_name=self.qualified_name) - @classmethod def ref_by_guid( cls, guid: str, semantic: "SaveSemantic | str" = SaveSemantic.REPLACE @@ -1369,6 +1312,9 @@ class AssetAttributes(ReferenceableAttributes): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -2109,6 +2055,7 @@ def _populate_asset_attrs(attrs: AssetAttributes, obj: Asset) -> None: attrs.announcement_type = obj.announcement_type attrs.announcement_updated_at = obj.announcement_updated_at attrs.announcement_updated_by = obj.announcement_updated_by + attrs.asset_announcement_expired_at = obj.asset_announcement_expired_at attrs.owner_users = obj.owner_users attrs.owner_groups = obj.owner_groups attrs.admin_users = obj.admin_users @@ -2354,6 +2301,7 @@ def _extract_asset_attrs(attrs: AssetAttributes) -> dict: result["announcement_type"] = attrs.announcement_type result["announcement_updated_at"] = attrs.announcement_updated_at result["announcement_updated_by"] = attrs.announcement_updated_by + result["asset_announcement_expired_at"] = attrs.asset_announcement_expired_at result["owner_users"] = attrs.owner_users result["owner_groups"] = attrs.owner_groups result["admin_users"] = attrs.admin_users @@ -2642,9 +2590,6 @@ def _asset_to_nested(asset: Asset) -> AssetNested: is_incomplete=asset.is_incomplete, provenance_type=asset.provenance_type, home_id=asset.home_id, - depth=asset.depth, - immediate_upstream=asset.immediate_upstream, - immediate_downstream=asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2674,6 +2619,7 @@ def _asset_from_nested(nested: AssetNested) -> Asset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2682,9 +2628,6 @@ def _asset_from_nested(nested: AssetNested) -> Asset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2753,6 +2696,9 @@ def _asset_from_nested_bytes(data: bytes, serde: Serde) -> Asset: Asset.ANNOUNCEMENT_UPDATED_BY = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) +Asset.ASSET_ANNOUNCEMENT_EXPIRED_AT = NumericField( + "assetAnnouncementExpiredAt", "assetAnnouncementExpiredAt" +) Asset.OWNER_USERS = KeywordField("ownerUsers", "ownerUsers") Asset.OWNER_GROUPS = KeywordField("ownerGroups", "ownerGroups") Asset.ADMIN_USERS = KeywordField("adminUsers", "adminUsers") diff --git a/pyatlan_v9/model/assets/asset_grouping.py b/pyatlan_v9/model/assets/asset_grouping.py index a81d9e7e1..af175fc22 100644 --- a/pyatlan_v9/model/assets/asset_grouping.py +++ b/pyatlan_v9/model/assets/asset_grouping.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import RelatedAssetGrouping from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class AssetGrouping(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGrouping" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class AssetGrouping(Asset): def __post_init__(self) -> None: self.type_name = "AssetGrouping" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGrouping instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AssetGrouping validation failed: {errors}") - - def minimize(self) -> "AssetGrouping": - """ - Return a minimal copy of this AssetGrouping with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGrouping with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGrouping instance with only the minimum required fields. - """ - self.validate() - return AssetGrouping(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAssetGrouping": - """ - Create a :class:`RelatedAssetGrouping` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGrouping reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAssetGrouping(guid=self.guid) - return RelatedAssetGrouping(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -497,9 +438,6 @@ def _asset_grouping_to_nested(asset_grouping: AssetGrouping) -> AssetGroupingNes is_incomplete=asset_grouping.is_incomplete, provenance_type=asset_grouping.provenance_type, home_id=asset_grouping.home_id, - depth=asset_grouping.depth, - immediate_upstream=asset_grouping.immediate_upstream, - immediate_downstream=asset_grouping.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -533,6 +471,7 @@ def _asset_grouping_from_nested(nested: AssetGroupingNested) -> AssetGrouping: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -541,9 +480,6 @@ def _asset_grouping_from_nested(nested: AssetGroupingNested) -> AssetGrouping: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/asset_grouping_collection.py b/pyatlan_v9/model/assets/asset_grouping_collection.py index a9f421dfb..c8c61bc27 100644 --- a/pyatlan_v9/model/assets/asset_grouping_collection.py +++ b/pyatlan_v9/model/assets/asset_grouping_collection.py @@ -38,10 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import ( - 
RelatedAssetGroupingCollection, - RelatedAssetGroupingStrategy, -) +from .asset_grouping_related import RelatedAssetGroupingStrategy from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -96,6 +93,8 @@ class AssetGroupingCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGroupingCollection" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -202,74 +201,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGroupingCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.asset_grouping_strategy is UNSET: - errors.append("asset_grouping_strategy is required for creation") - if errors: - raise ValueError(f"AssetGroupingCollection validation failed: {errors}") - - def minimize(self) -> "AssetGroupingCollection": - """ - Return a minimal copy of this AssetGroupingCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGroupingCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGroupingCollection instance with only the minimum required fields. - """ - self.validate() - return AssetGroupingCollection( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAssetGroupingCollection": - """ - Create a :class:`RelatedAssetGroupingCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGroupingCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAssetGroupingCollection(guid=self.guid) - return RelatedAssetGroupingCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -535,9 +466,6 @@ def _asset_grouping_collection_to_nested( is_incomplete=asset_grouping_collection.is_incomplete, provenance_type=asset_grouping_collection.provenance_type, home_id=asset_grouping_collection.home_id, - depth=asset_grouping_collection.depth, - immediate_upstream=asset_grouping_collection.immediate_upstream, - immediate_downstream=asset_grouping_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -573,6 +501,7 @@ def _asset_grouping_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -581,9 +510,6 @@ def _asset_grouping_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/asset_grouping_strategy.py b/pyatlan_v9/model/assets/asset_grouping_strategy.py index 69171f14e..abe97cf50 100644 --- a/pyatlan_v9/model/assets/asset_grouping_strategy.py +++ b/pyatlan_v9/model/assets/asset_grouping_strategy.py @@ -37,10 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import ( - RelatedAssetGroupingCollection, - 
RelatedAssetGroupingStrategy, -) +from .asset_grouping_related import RelatedAssetGroupingCollection from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -95,6 +92,8 @@ class AssetGroupingStrategy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGroupingStrategy" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -195,66 +194,6 @@ class AssetGroupingStrategy(Asset): def __post_init__(self) -> None: self.type_name = "AssetGroupingStrategy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGroupingStrategy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AssetGroupingStrategy validation failed: {errors}") - - def minimize(self) -> "AssetGroupingStrategy": - """ - Return a minimal copy of this AssetGroupingStrategy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGroupingStrategy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGroupingStrategy instance with only the minimum required fields. - """ - self.validate() - return AssetGroupingStrategy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAssetGroupingStrategy": - """ - Create a :class:`RelatedAssetGroupingStrategy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGroupingStrategy reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAssetGroupingStrategy(guid=self.guid) - return RelatedAssetGroupingStrategy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -520,9 +459,6 @@ def _asset_grouping_strategy_to_nested( is_incomplete=asset_grouping_strategy.is_incomplete, provenance_type=asset_grouping_strategy.provenance_type, home_id=asset_grouping_strategy.home_id, - depth=asset_grouping_strategy.depth, - immediate_upstream=asset_grouping_strategy.immediate_upstream, - immediate_downstream=asset_grouping_strategy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -558,6 +494,7 @@ def _asset_grouping_strategy_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -566,9 +503,6 @@ def _asset_grouping_strategy_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_strategy_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/asset_related.py b/pyatlan_v9/model/assets/asset_related.py index f62da5bf4..886c0b0b1 100644 --- a/pyatlan_v9/model/assets/asset_related.py +++ b/pyatlan_v9/model/assets/asset_related.py @@ -91,6 +91,9 @@ class RelatedAsset(RelatedReferenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + 
asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -761,7 +764,7 @@ class RelatedIncident(RelatedAsset): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "Incident" so it serializes correctly - incident_severity: Union[str, None, UnsetType] = UNSET + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/atlan_app.py b/pyatlan_v9/model/assets/atlan_app.py index 4f9a3825d..2a3d1bd35 100644 --- a/pyatlan_v9/model/assets/atlan_app.py +++ b/pyatlan_v9/model/assets/atlan_app.py @@ -37,11 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanApp, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -101,6 +97,8 @@ class AtlanApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanApp" + atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Atlan application this asset belongs to.""" @@ -214,66 +212,6 @@ class AtlanApp(Asset): def __post_init__(self) -> None: self.type_name = "AtlanApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanApp instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanApp validation failed: {errors}") - - def minimize(self) -> "AtlanApp": - """ - Return a minimal copy of this AtlanApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanApp instance with only the minimum required fields. - """ - self.validate() - return AtlanApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanApp": - """ - Create a :class:`RelatedAtlanApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanApp(guid=self.guid) - return RelatedAtlanApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -548,9 +486,6 @@ def _atlan_app_to_nested(atlan_app: AtlanApp) -> AtlanAppNested: is_incomplete=atlan_app.is_incomplete, provenance_type=atlan_app.provenance_type, home_id=atlan_app.home_id, - depth=atlan_app.depth, - immediate_upstream=atlan_app.immediate_upstream, - immediate_downstream=atlan_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -582,6 +517,7 @@ def _atlan_app_from_nested(nested: AtlanAppNested) -> AtlanApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -590,9 +526,6 @@ def _atlan_app_from_nested(nested: AtlanAppNested) -> AtlanApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/atlan_app_deployment.py b/pyatlan_v9/model/assets/atlan_app_deployment.py index 709e1b908..8f09e3c93 100644 --- a/pyatlan_v9/model/assets/atlan_app_deployment.py +++ b/pyatlan_v9/model/assets/atlan_app_deployment.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanAppDeployment, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import 
RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -108,6 +104,8 @@ class AtlanAppDeployment(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanAppDeployment" + atlan_app_version_id: Union[int, None, UnsetType] = UNSET """Version identifier for deployment.""" @@ -241,66 +239,6 @@ class AtlanAppDeployment(Asset): def __post_init__(self) -> None: self.type_name = "AtlanAppDeployment" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppDeployment instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanAppDeployment validation failed: {errors}") - - def minimize(self) -> "AtlanAppDeployment": - """ - Return a minimal copy of this AtlanAppDeployment with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppDeployment with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppDeployment instance with only the minimum required fields. - """ - self.validate() - return AtlanAppDeployment(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppDeployment": - """ - Create a :class:`RelatedAtlanAppDeployment` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppDeployment reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAtlanAppDeployment(guid=self.guid) - return RelatedAtlanAppDeployment(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -617,9 +555,6 @@ def _atlan_app_deployment_to_nested( is_incomplete=atlan_app_deployment.is_incomplete, provenance_type=atlan_app_deployment.provenance_type, home_id=atlan_app_deployment.home_id, - depth=atlan_app_deployment.depth, - immediate_upstream=atlan_app_deployment.immediate_upstream, - immediate_downstream=atlan_app_deployment.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -655,6 +590,7 @@ def _atlan_app_deployment_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -663,9 +599,6 @@ def _atlan_app_deployment_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_deployment_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/atlan_app_installed.py b/pyatlan_v9/model/assets/atlan_app_installed.py index be1f8b09f..24757de81 100644 --- a/pyatlan_v9/model/assets/atlan_app_installed.py +++ b/pyatlan_v9/model/assets/atlan_app_installed.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanAppInstalled, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -106,6 +102,8 @@ class AtlanAppInstalled(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanAppInstalled" + atlan_app_current_version_id: Union[int, None, UnsetType] = UNSET """Current version identifier for the atlan application.""" @@ -233,66 +231,6 @@ class AtlanAppInstalled(Asset): def __post_init__(self) -> None: self.type_name = "AtlanAppInstalled" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppInstalled instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanAppInstalled validation failed: {errors}") - - def minimize(self) -> "AtlanAppInstalled": - """ - Return a minimal copy of this AtlanAppInstalled with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppInstalled with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppInstalled instance with only the minimum required fields. - """ - self.validate() - return AtlanAppInstalled(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppInstalled": - """ - Create a :class:`RelatedAtlanAppInstalled` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppInstalled reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppInstalled(guid=self.guid) - return RelatedAtlanAppInstalled(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -599,9 +537,6 @@ def _atlan_app_installed_to_nested( is_incomplete=atlan_app_installed.is_incomplete, provenance_type=atlan_app_installed.provenance_type, home_id=atlan_app_installed.home_id, - depth=atlan_app_installed.depth, - immediate_upstream=atlan_app_installed.immediate_upstream, - immediate_downstream=atlan_app_installed.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -637,6 +572,7 @@ def _atlan_app_installed_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -645,9 +581,6 @@ def _atlan_app_installed_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_installed_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/atlan_app_related.py b/pyatlan_v9/model/assets/atlan_app_related.py index 13605625c..80ad9c470 100644 --- a/pyatlan_v9/model/assets/atlan_app_related.py +++ b/pyatlan_v9/model/assets/atlan_app_related.py @@ -126,16 +126,16 @@ class RelatedAtlanAppTool(RelatedAtlanApp): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AtlanAppTool" so it serializes correctly - atlan_app_tool_input_schema: Union[str, None, 
UnsetType] = UNSET + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + atlan_app_category: Union[str, None, UnsetType] = UNSET """Category of the tool.""" def __post_init__(self) -> None: @@ -153,25 +153,25 @@ class RelatedAtlanAppWorkflow(RelatedAtlanApp): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AtlanAppWorkflow" so it serializes correctly - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed 
by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/atlan_app_tool.py b/pyatlan_v9/model/assets/atlan_app_tool.py index 514bd04b3..68e069085 100644 --- a/pyatlan_v9/model/assets/atlan_app_tool.py +++ b/pyatlan_v9/model/assets/atlan_app_tool.py @@ -67,10 +67,10 @@ class AtlanAppTool(Asset): Instance of a tool defined in an Atlan application. """ - ATLAN_APP_TOOL_INPUT_SCHEMA: ClassVar[Any] = None - ATLAN_APP_TOOL_OUTPUT_SCHEMA: ClassVar[Any] = None - ATLAN_APP_TOOL_TASK_QUEUE: ClassVar[Any] = None - ATLAN_APP_TOOL_CATEGORY: ClassVar[Any] = None + ATLAN_APP_INPUT_SCHEMA: ClassVar[Any] = None + ATLAN_APP_OUTPUT_SCHEMA: ClassVar[Any] = None + ATLAN_APP_TASK_QUEUE: ClassVar[Any] = None + ATLAN_APP_CATEGORY: ClassVar[Any] = None ATLAN_APP_QUALIFIED_NAME: ClassVar[Any] = None ATLAN_APP_NAME: ClassVar[Any] = None ATLAN_APP_METADATA: ClassVar[Any] = None @@ -107,16 +107,18 @@ class AtlanAppTool(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - atlan_app_tool_input_schema: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "AtlanAppTool" + + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + atlan_app_category: 
Union[str, None, UnsetType] = UNSET """Category of the tool.""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -241,74 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppTool instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_app_name is UNSET: - errors.append("atlan_app_name is required for creation") - if self.atlan_app_qualified_name is UNSET: - errors.append("atlan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AtlanAppTool validation failed: {errors}") - - def minimize(self) -> "AtlanAppTool": - """ - Return a minimal copy of this AtlanAppTool with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppTool with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppTool instance with only the minimum required fields. - """ - self.validate() - return AtlanAppTool(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppTool": - """ - Create a :class:`RelatedAtlanAppTool` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppTool reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAtlanAppTool(guid=self.guid) - return RelatedAtlanAppTool(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -364,16 +298,16 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> AtlanAppToo class AtlanAppToolAttributes(AssetAttributes): """AtlanAppTool-specific attributes for nested API format.""" - atlan_app_tool_input_schema: Union[str, None, UnsetType] = UNSET + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + 
atlan_app_category: Union[str, None, UnsetType] = UNSET """Category of the tool.""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -554,10 +488,10 @@ def _populate_atlan_app_tool_attrs( ) -> None: """Populate AtlanAppTool-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.atlan_app_tool_input_schema = obj.atlan_app_tool_input_schema - attrs.atlan_app_tool_output_schema = obj.atlan_app_tool_output_schema - attrs.atlan_app_tool_task_queue = obj.atlan_app_tool_task_queue - attrs.atlan_app_tool_category = obj.atlan_app_tool_category + attrs.atlan_app_input_schema = obj.atlan_app_input_schema + attrs.atlan_app_output_schema = obj.atlan_app_output_schema + attrs.atlan_app_task_queue = obj.atlan_app_task_queue + attrs.atlan_app_category = obj.atlan_app_category attrs.atlan_app_qualified_name = obj.atlan_app_qualified_name attrs.atlan_app_name = obj.atlan_app_name attrs.atlan_app_metadata = obj.atlan_app_metadata @@ -567,10 +501,10 @@ def _populate_atlan_app_tool_attrs( def _extract_atlan_app_tool_attrs(attrs: AtlanAppToolAttributes) -> dict: """Extract all AtlanAppTool attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["atlan_app_tool_input_schema"] = attrs.atlan_app_tool_input_schema - result["atlan_app_tool_output_schema"] = attrs.atlan_app_tool_output_schema - result["atlan_app_tool_task_queue"] = attrs.atlan_app_tool_task_queue - result["atlan_app_tool_category"] = attrs.atlan_app_tool_category + result["atlan_app_input_schema"] = attrs.atlan_app_input_schema + result["atlan_app_output_schema"] = attrs.atlan_app_output_schema + result["atlan_app_task_queue"] = attrs.atlan_app_task_queue + result["atlan_app_category"] = attrs.atlan_app_category result["atlan_app_qualified_name"] = attrs.atlan_app_qualified_name result["atlan_app_name"] = attrs.atlan_app_name result["atlan_app_metadata"] = attrs.atlan_app_metadata @@ -611,9 +545,6 @@ def 
_atlan_app_tool_to_nested(atlan_app_tool: AtlanAppTool) -> AtlanAppToolNeste is_incomplete=atlan_app_tool.is_incomplete, provenance_type=atlan_app_tool.provenance_type, home_id=atlan_app_tool.home_id, - depth=atlan_app_tool.depth, - immediate_upstream=atlan_app_tool.immediate_upstream, - immediate_downstream=atlan_app_tool.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -647,6 +578,7 @@ def _atlan_app_tool_from_nested(nested: AtlanAppToolNested) -> AtlanAppTool: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -655,9 +587,6 @@ def _atlan_app_tool_from_nested(nested: AtlanAppToolNested) -> AtlanAppTool: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_tool_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -686,18 +615,16 @@ def _atlan_app_tool_from_nested_bytes(data: bytes, serde: Serde) -> AtlanAppTool TextField, ) -AtlanAppTool.ATLAN_APP_TOOL_INPUT_SCHEMA = TextField( - "atlanAppToolInputSchema", "atlanAppToolInputSchema" -) -AtlanAppTool.ATLAN_APP_TOOL_OUTPUT_SCHEMA = TextField( - "atlanAppToolOutputSchema", "atlanAppToolOutputSchema" +AtlanAppTool.ATLAN_APP_INPUT_SCHEMA = TextField( + "atlanAppInputSchema", "atlanAppInputSchema" ) -AtlanAppTool.ATLAN_APP_TOOL_TASK_QUEUE = KeywordField( - "atlanAppToolTaskQueue", "atlanAppToolTaskQueue" +AtlanAppTool.ATLAN_APP_OUTPUT_SCHEMA = TextField( + "atlanAppOutputSchema", "atlanAppOutputSchema" ) -AtlanAppTool.ATLAN_APP_TOOL_CATEGORY = KeywordField( - "atlanAppToolCategory", "atlanAppToolCategory" 
+AtlanAppTool.ATLAN_APP_TASK_QUEUE = KeywordField( + "atlanAppTaskQueue", "atlanAppTaskQueue" ) +AtlanAppTool.ATLAN_APP_CATEGORY = KeywordField("atlanAppCategory", "atlanAppCategory") AtlanAppTool.ATLAN_APP_QUALIFIED_NAME = KeywordField( "atlanAppQualifiedName", "atlanAppQualifiedName" ) diff --git a/pyatlan_v9/model/assets/atlan_app_workflow.py b/pyatlan_v9/model/assets/atlan_app_workflow.py index ad5aa62cd..a77a79502 100644 --- a/pyatlan_v9/model/assets/atlan_app_workflow.py +++ b/pyatlan_v9/model/assets/atlan_app_workflow.py @@ -68,13 +68,13 @@ class AtlanAppWorkflow(Asset): Instance of a workflow in an Atlan application. """ - ATLAN_APP_WORKFLOW_VERSION: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_SLUG: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_DAG: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_STATUS: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_ERROR_HANDLING: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_OWNERSHIP: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_TRIGGERS: ClassVar[Any] = None + ATLAN_APP_VERSION: ClassVar[Any] = None + ATLAN_APP_SLUG: ClassVar[Any] = None + ATLAN_APP_DAG: ClassVar[Any] = None + ATLAN_APP_STATUS: ClassVar[Any] = None + ATLAN_APP_ERROR_HANDLING: ClassVar[Any] = None + ATLAN_APP_OWNERSHIP: ClassVar[Any] = None + ATLAN_APP_TRIGGERS: ClassVar[Any] = None ATLAN_APP_QUALIFIED_NAME: ClassVar[Any] = None ATLAN_APP_NAME: ClassVar[Any] = None ATLAN_APP_METADATA: ClassVar[Any] = None @@ -112,25 +112,27 @@ class AtlanAppWorkflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "AtlanAppWorkflow" + + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: 
Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -258,74 +260,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppWorkflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_app_name is UNSET: - errors.append("atlan_app_name is required for creation") - if self.atlan_app_qualified_name is UNSET: - errors.append("atlan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AtlanAppWorkflow validation failed: {errors}") - - def minimize(self) -> "AtlanAppWorkflow": - """ - Return a minimal copy of this AtlanAppWorkflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppWorkflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppWorkflow instance with only the minimum required fields. - """ - self.validate() - return AtlanAppWorkflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppWorkflow": - """ - Create a :class:`RelatedAtlanAppWorkflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppWorkflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppWorkflow(guid=self.guid) - return RelatedAtlanAppWorkflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -383,25 +317,25 @@ def from_json( class AtlanAppWorkflowAttributes(AssetAttributes): """AtlanAppWorkflow-specific attributes for nested API format.""" - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -586,13 +520,13 @@ def _populate_atlan_app_workflow_attrs( ) -> None: """Populate AtlanAppWorkflow-specific attributes on the attrs struct.""" 
_populate_asset_attrs(attrs, obj) - attrs.atlan_app_workflow_version = obj.atlan_app_workflow_version - attrs.atlan_app_workflow_slug = obj.atlan_app_workflow_slug - attrs.atlan_app_workflow_dag = obj.atlan_app_workflow_dag - attrs.atlan_app_workflow_status = obj.atlan_app_workflow_status - attrs.atlan_app_workflow_error_handling = obj.atlan_app_workflow_error_handling - attrs.atlan_app_workflow_ownership = obj.atlan_app_workflow_ownership - attrs.atlan_app_workflow_triggers = obj.atlan_app_workflow_triggers + attrs.atlan_app_version = obj.atlan_app_version + attrs.atlan_app_slug = obj.atlan_app_slug + attrs.atlan_app_dag = obj.atlan_app_dag + attrs.atlan_app_status = obj.atlan_app_status + attrs.atlan_app_error_handling = obj.atlan_app_error_handling + attrs.atlan_app_ownership = obj.atlan_app_ownership + attrs.atlan_app_triggers = obj.atlan_app_triggers attrs.atlan_app_qualified_name = obj.atlan_app_qualified_name attrs.atlan_app_name = obj.atlan_app_name attrs.atlan_app_metadata = obj.atlan_app_metadata @@ -602,15 +536,13 @@ def _populate_atlan_app_workflow_attrs( def _extract_atlan_app_workflow_attrs(attrs: AtlanAppWorkflowAttributes) -> dict: """Extract all AtlanAppWorkflow attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["atlan_app_workflow_version"] = attrs.atlan_app_workflow_version - result["atlan_app_workflow_slug"] = attrs.atlan_app_workflow_slug - result["atlan_app_workflow_dag"] = attrs.atlan_app_workflow_dag - result["atlan_app_workflow_status"] = attrs.atlan_app_workflow_status - result["atlan_app_workflow_error_handling"] = ( - attrs.atlan_app_workflow_error_handling - ) - result["atlan_app_workflow_ownership"] = attrs.atlan_app_workflow_ownership - result["atlan_app_workflow_triggers"] = attrs.atlan_app_workflow_triggers + result["atlan_app_version"] = attrs.atlan_app_version + result["atlan_app_slug"] = attrs.atlan_app_slug + result["atlan_app_dag"] = attrs.atlan_app_dag + 
result["atlan_app_status"] = attrs.atlan_app_status + result["atlan_app_error_handling"] = attrs.atlan_app_error_handling + result["atlan_app_ownership"] = attrs.atlan_app_ownership + result["atlan_app_triggers"] = attrs.atlan_app_triggers result["atlan_app_qualified_name"] = attrs.atlan_app_qualified_name result["atlan_app_name"] = attrs.atlan_app_name result["atlan_app_metadata"] = attrs.atlan_app_metadata @@ -655,9 +587,6 @@ def _atlan_app_workflow_to_nested( is_incomplete=atlan_app_workflow.is_incomplete, provenance_type=atlan_app_workflow.provenance_type, home_id=atlan_app_workflow.home_id, - depth=atlan_app_workflow.depth, - immediate_upstream=atlan_app_workflow.immediate_upstream, - immediate_downstream=atlan_app_workflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -691,6 +620,7 @@ def _atlan_app_workflow_from_nested(nested: AtlanAppWorkflowNested) -> AtlanAppW updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -699,9 +629,6 @@ def _atlan_app_workflow_from_nested(nested: AtlanAppWorkflowNested) -> AtlanAppW is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_workflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -732,27 +659,17 @@ def _atlan_app_workflow_from_nested_bytes( TextField, ) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_VERSION = KeywordField( - "atlanAppWorkflowVersion", "atlanAppWorkflowVersion" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_SLUG = KeywordField( - "atlanAppWorkflowSlug", "atlanAppWorkflowSlug" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_DAG = TextField( 
- "atlanAppWorkflowDag", "atlanAppWorkflowDag" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_STATUS = KeywordField( - "atlanAppWorkflowStatus", "atlanAppWorkflowStatus" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_ERROR_HANDLING = KeywordField( - "atlanAppWorkflowErrorHandling", "atlanAppWorkflowErrorHandling" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_OWNERSHIP = KeywordField( - "atlanAppWorkflowOwnership", "atlanAppWorkflowOwnership" +AtlanAppWorkflow.ATLAN_APP_VERSION = KeywordField("atlanAppVersion", "atlanAppVersion") +AtlanAppWorkflow.ATLAN_APP_SLUG = KeywordField("atlanAppSlug", "atlanAppSlug") +AtlanAppWorkflow.ATLAN_APP_DAG = TextField("atlanAppDag", "atlanAppDag") +AtlanAppWorkflow.ATLAN_APP_STATUS = KeywordField("atlanAppStatus", "atlanAppStatus") +AtlanAppWorkflow.ATLAN_APP_ERROR_HANDLING = KeywordField( + "atlanAppErrorHandling", "atlanAppErrorHandling" ) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_TRIGGERS = TextField( - "atlanAppWorkflowTriggers", "atlanAppWorkflowTriggers" +AtlanAppWorkflow.ATLAN_APP_OWNERSHIP = KeywordField( + "atlanAppOwnership", "atlanAppOwnership" ) +AtlanAppWorkflow.ATLAN_APP_TRIGGERS = TextField("atlanAppTriggers", "atlanAppTriggers") AtlanAppWorkflow.ATLAN_APP_QUALIFIED_NAME = KeywordField( "atlanAppQualifiedName", "atlanAppQualifiedName" ) diff --git a/pyatlan_v9/model/assets/atlas_glossary.py b/pyatlan_v9/model/assets/atlas_glossary.py index facf603ea..aff011d59 100644 --- a/pyatlan_v9/model/assets/atlas_glossary.py +++ b/pyatlan_v9/model/assets/atlas_glossary.py @@ -40,11 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gtc_related import ( - RelatedAtlasGlossary, - RelatedAtlasGlossaryCategory, - RelatedAtlasGlossaryTerm, -) +from .gtc_related import RelatedAtlasGlossaryCategory, RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable from .resource_related 
import RelatedFile, RelatedLink, RelatedReadme @@ -89,6 +85,8 @@ class AtlasGlossary(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossary" + short_description: Union[str, None, UnsetType] = UNSET """Unused. A short definition of the glossary. See 'description' and 'userDescription' instead.""" @@ -176,70 +174,6 @@ class AtlasGlossary(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossary" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossary instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if self.guid is UNSET: - errors.append("guid is required") - if errors: - raise ValueError(f"AtlasGlossary validation failed: {errors}") - - def minimize(self) -> "AtlasGlossary": - """ - Return a minimal copy of this AtlasGlossary with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossary with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossary instance with only the minimum required fields. - """ - self.validate() - return AtlasGlossary( - guid=self.guid, name=self.name, qualified_name=self.qualified_name - ) - - def relate(self) -> "RelatedAtlasGlossary": - """ - Create a :class:`RelatedAtlasGlossary` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossary reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAtlasGlossary(guid=self.guid) - return RelatedAtlasGlossary(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str) -> "AtlasGlossary": @@ -523,9 +457,6 @@ def _atlas_glossary_to_nested(atlas_glossary: AtlasGlossary) -> AtlasGlossaryNes is_incomplete=atlas_glossary.is_incomplete, provenance_type=atlas_glossary.provenance_type, home_id=atlas_glossary.home_id, - depth=atlas_glossary.depth, - immediate_upstream=atlas_glossary.immediate_upstream, - immediate_downstream=atlas_glossary.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -559,6 +490,7 @@ def _atlas_glossary_from_nested(nested: AtlasGlossaryNested) -> AtlasGlossary: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -567,9 +499,6 @@ def _atlas_glossary_from_nested(nested: AtlasGlossaryNested) -> AtlasGlossary: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/atlas_glossary_category.py b/pyatlan_v9/model/assets/atlas_glossary_category.py index 2c1abcfb4..ccd5ff3af 100644 --- a/pyatlan_v9/model/assets/atlas_glossary_category.py +++ b/pyatlan_v9/model/assets/atlas_glossary_category.py @@ -66,7 +66,6 @@ class AtlasGlossaryCategory(Asset): LONG_DESCRIPTION: ClassVar[Any] = None ADDITIONAL_ATTRIBUTES: ClassVar[Any] = None CATEGORY_TYPE: ClassVar[Any] = None - ANCHOR: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None @@ -77,6 +76,7 @@ class AtlasGlossaryCategory(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None TERMS: ClassVar[Any] = None + ANCHOR: ClassVar[Any] = None CHILDREN_CATEGORIES: ClassVar[Any] = None PARENT_CATEGORY: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -89,6 +89,8 @@ class AtlasGlossaryCategory(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossaryCategory" + short_description: Union[str, None, UnsetType] = UNSET """Unused. Brief summary of the category. 
See 'description' and 'userDescription' instead.""" @@ -101,9 +103,6 @@ class AtlasGlossaryCategory(Asset): category_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this category is contained.""" - anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -136,6 +135,9 @@ class AtlasGlossaryCategory(Asset): terms: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Terms organized within this category.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this category is contained.""" + children_categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = ( UNSET ) @@ -178,71 +180,6 @@ class AtlasGlossaryCategory(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossaryCategory" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossaryCategory instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.anchor is UNSET: - errors.append("anchor is required for creation") - if errors: - raise ValueError(f"AtlasGlossaryCategory validation failed: {errors}") - - def minimize(self) -> "AtlasGlossaryCategory": - """ - Return a minimal copy of this AtlasGlossaryCategory with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossaryCategory with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossaryCategory instance with only the minimum required fields. - """ - self.validate() - return AtlasGlossaryCategory( - qualified_name=self.qualified_name, name=self.name, anchor=self.anchor - ) - - def relate(self) -> "RelatedAtlasGlossaryCategory": - """ - Create a :class:`RelatedAtlasGlossaryCategory` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossaryCategory reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlasGlossaryCategory(guid=self.guid) - return RelatedAtlasGlossaryCategory(qualified_name=self.qualified_name) - @classmethod def can_be_archived(cls) -> bool: return False @@ -478,9 +415,6 @@ class AtlasGlossaryCategoryAttributes(AssetAttributes): category_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this category is contained.""" - class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): """AtlasGlossaryCategory-specific relationship attributes for nested API format.""" @@ -517,6 +451,9 @@ class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): terms: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Terms organized within this category.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this category is contained.""" + children_categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = ( UNSET ) @@ -588,6 +525,7 @@ class AtlasGlossaryCategoryNested(AssetNested): "dq_reference_dataset_rules", "meanings", "terms", + "anchor", "children_categories", "parent_category", "mc_monitors", @@ -611,7 +549,6 @@ def _populate_atlas_glossary_category_attrs( attrs.long_description = obj.long_description attrs.additional_attributes = obj.additional_attributes attrs.category_type = obj.category_type - attrs.anchor = obj.anchor def _extract_atlas_glossary_category_attrs( @@ -623,7 +560,6 @@ def _extract_atlas_glossary_category_attrs( result["long_description"] = attrs.long_description result["additional_attributes"] = attrs.additional_attributes result["category_type"] = attrs.category_type - result["anchor"] = attrs.anchor return result @@ -664,9 +600,6 @@ def _atlas_glossary_category_to_nested( is_incomplete=atlas_glossary_category.is_incomplete, provenance_type=atlas_glossary_category.provenance_type, 
home_id=atlas_glossary_category.home_id, - depth=atlas_glossary_category.depth, - immediate_upstream=atlas_glossary_category.immediate_upstream, - immediate_downstream=atlas_glossary_category.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -702,6 +635,7 @@ def _atlas_glossary_category_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -710,9 +644,6 @@ def _atlas_glossary_category_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_category_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -749,7 +680,6 @@ def _atlas_glossary_category_from_nested_bytes( "additionalAttributes", "additionalAttributes" ) AtlasGlossaryCategory.CATEGORY_TYPE = KeywordField("categoryType", "categoryType") -AtlasGlossaryCategory.ANCHOR = KeywordField("anchor", "anchor") AtlasGlossaryCategory.ANOMALO_CHECKS = RelationField("anomaloChecks") AtlasGlossaryCategory.APPLICATION = RelationField("application") AtlasGlossaryCategory.APPLICATION_FIELD = RelationField("applicationField") @@ -764,6 +694,7 @@ def _atlas_glossary_category_from_nested_bytes( ) AtlasGlossaryCategory.MEANINGS = RelationField("meanings") AtlasGlossaryCategory.TERMS = RelationField("terms") +AtlasGlossaryCategory.ANCHOR = RelationField("anchor") AtlasGlossaryCategory.CHILDREN_CATEGORIES = RelationField("childrenCategories") AtlasGlossaryCategory.PARENT_CATEGORY = RelationField("parentCategory") AtlasGlossaryCategory.MC_MONITORS = RelationField("mcMonitors") diff --git 
a/pyatlan_v9/model/assets/atlas_glossary_term.py b/pyatlan_v9/model/assets/atlas_glossary_term.py index bbd635f64..a0f1b65d5 100644 --- a/pyatlan_v9/model/assets/atlas_glossary_term.py +++ b/pyatlan_v9/model/assets/atlas_glossary_term.py @@ -69,7 +69,6 @@ class AtlasGlossaryTerm(Asset): USAGE: ClassVar[Any] = None ADDITIONAL_ATTRIBUTES: ClassVar[Any] = None TERM_TYPE: ClassVar[Any] = None - ANCHOR: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None @@ -80,6 +79,7 @@ class AtlasGlossaryTerm(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None ASSIGNED_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None + ANCHOR: ClassVar[Any] = None CATEGORIES: ClassVar[Any] = None SEE_ALSO: ClassVar[Any] = None SYNONYMS: ClassVar[Any] = None @@ -104,6 +104,8 @@ class AtlasGlossaryTerm(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossaryTerm" + short_description: Union[str, None, UnsetType] = UNSET """Unused. Brief summary of the term. 
See 'description' and 'userDescription' instead.""" @@ -125,9 +127,6 @@ class AtlasGlossaryTerm(Asset): term_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this term is contained.""" - anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -160,6 +159,9 @@ class AtlasGlossaryTerm(Asset): meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this term is contained.""" + categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = UNSET """Categories within which this term is organized.""" @@ -236,71 +238,6 @@ class AtlasGlossaryTerm(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossaryTerm" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossaryTerm instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.anchor is UNSET: - errors.append("anchor is required for creation") - if errors: - raise ValueError(f"AtlasGlossaryTerm validation failed: {errors}") - - def minimize(self) -> "AtlasGlossaryTerm": - """ - Return a minimal copy of this AtlasGlossaryTerm with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossaryTerm with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossaryTerm instance with only the minimum required fields. - """ - self.validate() - return AtlasGlossaryTerm( - qualified_name=self.qualified_name, name=self.name, anchor=self.anchor - ) - - def relate(self) -> "RelatedAtlasGlossaryTerm": - """ - Create a :class:`RelatedAtlasGlossaryTerm` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossaryTerm reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlasGlossaryTerm(guid=self.guid) - return RelatedAtlasGlossaryTerm(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -527,9 +464,6 @@ class AtlasGlossaryTermAttributes(AssetAttributes): term_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this term is contained.""" - class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): """AtlasGlossaryTerm-specific relationship attributes for nested API format.""" @@ -566,6 +500,9 @@ class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this term is contained.""" + categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = UNSET """Categories within which this term is organized.""" @@ -671,6 +608,7 @@ class AtlasGlossaryTermNested(AssetNested): "dq_reference_dataset_rules", "assigned_entities", "meanings", + "anchor", "categories", "see_also", "synonyms", @@ -709,7 +647,6 @@ def _populate_atlas_glossary_term_attrs( attrs.usage = obj.usage attrs.additional_attributes = obj.additional_attributes attrs.term_type = obj.term_type - attrs.anchor = obj.anchor def _extract_atlas_glossary_term_attrs(attrs: AtlasGlossaryTermAttributes) -> dict: @@ -722,7 +659,6 @@ def _extract_atlas_glossary_term_attrs(attrs: AtlasGlossaryTermAttributes) -> di result["usage"] = attrs.usage result["additional_attributes"] = attrs.additional_attributes result["term_type"] = attrs.term_type - result["anchor"] = attrs.anchor return result @@ -763,9 +699,6 @@ def _atlas_glossary_term_to_nested( is_incomplete=atlas_glossary_term.is_incomplete, provenance_type=atlas_glossary_term.provenance_type, home_id=atlas_glossary_term.home_id, - 
depth=atlas_glossary_term.depth, - immediate_upstream=atlas_glossary_term.immediate_upstream, - immediate_downstream=atlas_glossary_term.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -801,6 +734,7 @@ def _atlas_glossary_term_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -809,9 +743,6 @@ def _atlas_glossary_term_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_term_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -849,7 +780,6 @@ def _atlas_glossary_term_from_nested_bytes( "additionalAttributes", "additionalAttributes" ) AtlasGlossaryTerm.TERM_TYPE = KeywordField("termType", "termType") -AtlasGlossaryTerm.ANCHOR = KeywordField("anchor", "anchor") AtlasGlossaryTerm.ANOMALO_CHECKS = RelationField("anomaloChecks") AtlasGlossaryTerm.APPLICATION = RelationField("application") AtlasGlossaryTerm.APPLICATION_FIELD = RelationField("applicationField") @@ -860,6 +790,7 @@ def _atlas_glossary_term_from_nested_bytes( AtlasGlossaryTerm.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") AtlasGlossaryTerm.ASSIGNED_ENTITIES = RelationField("assignedEntities") AtlasGlossaryTerm.MEANINGS = RelationField("meanings") +AtlasGlossaryTerm.ANCHOR = RelationField("anchor") AtlasGlossaryTerm.CATEGORIES = RelationField("categories") AtlasGlossaryTerm.SEE_ALSO = RelationField("seeAlso") AtlasGlossaryTerm.SYNONYMS = RelationField("synonyms") diff --git a/pyatlan_v9/model/assets/aws.py b/pyatlan_v9/model/assets/aws.py index 1328a1237..98c32ee8a 
100644 --- a/pyatlan_v9/model/assets/aws.py +++ b/pyatlan_v9/model/assets/aws.py @@ -36,7 +36,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedAWS from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -86,6 +85,8 @@ class AWS(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AWS" + aws_arn: Union[str, None, UnsetType] = UNSET """DEPRECATED: This legacy attribute must be unique across all AWS asset instances. This can create non-obvious edge cases for creating / updating assets, and we therefore recommended NOT using it. See and use cloudResourceName instead.""" @@ -179,66 +180,6 @@ class AWS(Asset): def __post_init__(self) -> None: self.type_name = "AWS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AWS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AWS validation failed: {errors}") - - def minimize(self) -> "AWS": - """ - Return a minimal copy of this AWS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AWS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AWS instance with only the minimum required fields. - """ - self.validate() - return AWS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAWS": - """ - Create a :class:`RelatedAWS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAWS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAWS(guid=self.guid) - return RelatedAWS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -489,9 +430,6 @@ def _aws_to_nested(aws: AWS) -> AWSNested: is_incomplete=aws.is_incomplete, provenance_type=aws.provenance_type, home_id=aws.home_id, - depth=aws.depth, - immediate_upstream=aws.immediate_upstream, - immediate_downstream=aws.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -521,6 +459,7 @@ def _aws_from_nested(nested: AWSNested) -> AWS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -529,9 +468,6 @@ def _aws_from_nested(nested: AWSNested) -> AWS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_aws_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/azure.py b/pyatlan_v9/model/assets/azure.py index 12eb7b4f0..a27ce7f64 100644 --- a/pyatlan_v9/model/assets/azure.py +++ b/pyatlan_v9/model/assets/azure.py @@ -36,7 +36,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedAzure from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -81,6 +80,8 @@ class Azure(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "Azure" + azure_resource_id: Union[str, None, UnsetType] = UNSET """Resource identifier of this asset in Azure.""" @@ -159,66 +160,6 @@ class Azure(Asset): def __post_init__(self) -> None: self.type_name = "Azure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Azure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Azure validation failed: {errors}") - - def minimize(self) -> "Azure": - """ - Return a minimal copy of this Azure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Azure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Azure instance with only the minimum required fields. - """ - self.validate() - return Azure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzure": - """ - Create a :class:`RelatedAzure` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAzure(guid=self.guid) - return RelatedAzure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -448,9 +389,6 @@ def _azure_to_nested(azure: Azure) -> AzureNested: is_incomplete=azure.is_incomplete, provenance_type=azure.provenance_type, home_id=azure.home_id, - depth=azure.depth, - immediate_upstream=azure.immediate_upstream, - immediate_downstream=azure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -480,6 +418,7 @@ def _azure_from_nested(nested: AzureNested) -> Azure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -488,9 +427,6 @@ def _azure_from_nested(nested: AzureNested) -> Azure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/azure_service_bus.py b/pyatlan_v9/model/assets/azure_service_bus.py index f26975a9d..079a3923c 100644 --- a/pyatlan_v9/model/assets/azure_service_bus.py +++ b/pyatlan_v9/model/assets/azure_service_bus.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import 
RelatedAzureServiceBus from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -94,6 +93,8 @@ class AzureServiceBus(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBus" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -198,66 +199,6 @@ class AzureServiceBus(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBus" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBus instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBus validation failed: {errors}") - - def minimize(self) -> "AzureServiceBus": - """ - Return a minimal copy of this AzureServiceBus with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBus with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBus instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBus(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBus": - """ - Create a :class:`RelatedAzureServiceBus` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBus reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAzureServiceBus(guid=self.guid) - return RelatedAzureServiceBus(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -537,9 +478,6 @@ def _azure_service_bus_to_nested( is_incomplete=azure_service_bus.is_incomplete, provenance_type=azure_service_bus.provenance_type, home_id=azure_service_bus.home_id, - depth=azure_service_bus.depth, - immediate_upstream=azure_service_bus.immediate_upstream, - immediate_downstream=azure_service_bus.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -573,6 +511,7 @@ def _azure_service_bus_from_nested(nested: AzureServiceBusNested) -> AzureServic updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -581,9 +520,6 @@ def _azure_service_bus_from_nested(nested: 
AzureServiceBusNested) -> AzureServic is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/azure_service_bus_namespace.py b/pyatlan_v9/model/assets/azure_service_bus_namespace.py index 963b3ab9d..0b2da2303 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_namespace.py +++ b/pyatlan_v9/model/assets/azure_service_bus_namespace.py @@ -37,10 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import ( - RelatedAzureServiceBusNamespace, - RelatedAzureServiceBusTopic, -) +from .azure_service_bus_related import RelatedAzureServiceBusTopic from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -98,6 +95,8 @@ class AzureServiceBusNamespace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusNamespace" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -207,68 +206,6 @@ class AzureServiceBusNamespace(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBusNamespace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusNamespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBusNamespace validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusNamespace": - """ - Return a minimal copy of this AzureServiceBusNamespace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusNamespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusNamespace instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusNamespace( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAzureServiceBusNamespace": - """ - Create a :class:`RelatedAzureServiceBusNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBusNamespace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAzureServiceBusNamespace(guid=self.guid) - return RelatedAzureServiceBusNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -556,9 +493,6 @@ def _azure_service_bus_namespace_to_nested( is_incomplete=azure_service_bus_namespace.is_incomplete, provenance_type=azure_service_bus_namespace.provenance_type, home_id=azure_service_bus_namespace.home_id, - depth=azure_service_bus_namespace.depth, - immediate_upstream=azure_service_bus_namespace.immediate_upstream, - immediate_downstream=azure_service_bus_namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -594,6 +528,7 @@ def _azure_service_bus_namespace_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -602,9 +537,6 @@ def _azure_service_bus_namespace_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/azure_service_bus_schema.py b/pyatlan_v9/model/assets/azure_service_bus_schema.py index b43bf2310..a0577a9e1 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_schema.py +++ b/pyatlan_v9/model/assets/azure_service_bus_schema.py @@ -37,10 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import ( - 
RelatedAzureServiceBusSchema, - RelatedAzureServiceBusTopic, -) +from .azure_service_bus_related import RelatedAzureServiceBusTopic from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -98,6 +95,8 @@ class AzureServiceBusSchema(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusSchema" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -207,66 +206,6 @@ class AzureServiceBusSchema(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBusSchema" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusSchema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBusSchema validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusSchema": - """ - Return a minimal copy of this AzureServiceBusSchema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusSchema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusSchema instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusSchema(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBusSchema": - """ - Create a :class:`RelatedAzureServiceBusSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBusSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAzureServiceBusSchema(guid=self.guid) - return RelatedAzureServiceBusSchema(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -554,9 +493,6 @@ def _azure_service_bus_schema_to_nested( is_incomplete=azure_service_bus_schema.is_incomplete, provenance_type=azure_service_bus_schema.provenance_type, home_id=azure_service_bus_schema.home_id, - depth=azure_service_bus_schema.depth, - immediate_upstream=azure_service_bus_schema.immediate_upstream, - immediate_downstream=azure_service_bus_schema.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +528,7 @@ def _azure_service_bus_schema_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +537,6 @@ def _azure_service_bus_schema_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_schema_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/azure_service_bus_topic.py b/pyatlan_v9/model/assets/azure_service_bus_topic.py index 22e978ffc..9ab3edc6a 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_topic.py +++ b/pyatlan_v9/model/assets/azure_service_bus_topic.py @@ -41,7 +41,6 @@ from .azure_service_bus_related import ( RelatedAzureServiceBusNamespace, RelatedAzureServiceBusSchema, - RelatedAzureServiceBusTopic, ) 
from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric @@ -101,6 +100,8 @@ class AzureServiceBusTopic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusTopic" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -221,80 +222,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusTopic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.azure_service_bus_namespace is UNSET: - errors.append("azure_service_bus_namespace is required for creation") - if self.azure_service_bus_namespace_name is UNSET: - errors.append( - "azure_service_bus_namespace_name is required for creation" - ) - if self.azure_service_bus_namespace_qualified_name is UNSET: - errors.append( - "azure_service_bus_namespace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AzureServiceBusTopic validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusTopic": - """ - Return a minimal copy of this AzureServiceBusTopic with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusTopic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusTopic instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusTopic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBusTopic": - """ - Create a :class:`RelatedAzureServiceBusTopic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedAzureServiceBusTopic reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAzureServiceBusTopic(guid=self.guid) - return RelatedAzureServiceBusTopic(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -588,9 +515,6 @@ def _azure_service_bus_topic_to_nested( is_incomplete=azure_service_bus_topic.is_incomplete, provenance_type=azure_service_bus_topic.provenance_type, home_id=azure_service_bus_topic.home_id, - depth=azure_service_bus_topic.depth, - immediate_upstream=azure_service_bus_topic.immediate_upstream, - immediate_downstream=azure_service_bus_topic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -626,6 +550,7 @@ def _azure_service_bus_topic_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -634,9 +559,6 @@ def _azure_service_bus_topic_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_topic_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/bi.py b/pyatlan_v9/model/assets/bi.py index 3d91d8a57..bab3493d7 100644 --- a/pyatlan_v9/model/assets/bi.py +++ b/pyatlan_v9/model/assets/bi.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedBI from .data_mesh_related import RelatedDataProduct from .data_quality_related 
import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class BI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "BI" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class BI(Asset): def __post_init__(self) -> None: self.type_name = "BI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BI validation failed: {errors}") - - def minimize(self) -> "BI": - """ - Return a minimal copy of this BI with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new BI instance with only the minimum required fields. - """ - self.validate() - return BI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBI": - """ - Create a :class:`RelatedBI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBI reference to this asset. - """ - if self.guid is not UNSET: - return RelatedBI(guid=self.guid) - return RelatedBI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -489,9 +430,6 @@ def _bi_to_nested(bi: BI) -> BINested: is_incomplete=bi.is_incomplete, provenance_type=bi.provenance_type, home_id=bi.home_id, - depth=bi.depth, - immediate_upstream=bi.immediate_upstream, - immediate_downstream=bi.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -521,6 +459,7 @@ def _bi_from_nested(nested: BINested) -> BI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -529,9 +468,6 @@ def _bi_from_nested(nested: BINested) -> BI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bi_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/bi_process.py b/pyatlan_v9/model/assets/bi_process.py index 54a4f5e63..ee7e711e0 100644 --- 
a/pyatlan_v9/model/assets/bi_process.py +++ b/pyatlan_v9/model/assets/bi_process.py @@ -49,7 +49,7 @@ from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .power_bi_related import RelatedPowerBIDataflow -from .process_related import RelatedBIProcess, RelatedColumnProcess +from .process_related import RelatedColumnProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -106,6 +106,8 @@ class BIProcess(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "BIProcess" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -228,66 +230,6 @@ class BIProcess(Asset): def __post_init__(self) -> None: self.type_name = "BIProcess" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BIProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BIProcess validation failed: {errors}") - - def minimize(self) -> "BIProcess": - """ - Return a minimal copy of this BIProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BIProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BIProcess instance with only the minimum required fields. - """ - self.validate() - return BIProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBIProcess": - """ - Create a :class:`RelatedBIProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBIProcess reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedBIProcess(guid=self.guid) - return RelatedBIProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -580,9 +522,6 @@ def _bi_process_to_nested(bi_process: BIProcess) -> BIProcessNested: is_incomplete=bi_process.is_incomplete, provenance_type=bi_process.provenance_type, home_id=bi_process.home_id, - depth=bi_process.depth, - immediate_upstream=bi_process.immediate_upstream, - immediate_downstream=bi_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -614,6 +553,7 @@ def _bi_process_from_nested(nested: BIProcessNested) -> BIProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -622,9 +562,6 @@ def _bi_process_from_nested(nested: BIProcessNested) -> BIProcess: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bi_process_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/bigquery_related.py b/pyatlan_v9/model/assets/bigquery_related.py index e77fa73a3..c140f1815 100644 --- a/pyatlan_v9/model/assets/bigquery_related.py +++ b/pyatlan_v9/model/assets/bigquery_related.py @@ -59,19 +59,19 @@ class RelatedBigqueryRoutine(RelatedProcedure): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "BigqueryRoutine" so it serializes correctly - bigquery_routine_type: Union[str, 
None, UnsetType] = UNSET + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create the bigquery routine.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/bigquery_routine.py b/pyatlan_v9/model/assets/bigquery_routine.py index 00a000e98..859532e32 100644 --- a/pyatlan_v9/model/assets/bigquery_routine.py +++ b/pyatlan_v9/model/assets/bigquery_routine.py @@ -38,7 +38,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .bigquery_related import RelatedBigqueryRoutine from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .dbt_related import ( @@ -71,11 +70,11 @@ class BigqueryRoutine(Asset): Instance of a bigquery routine in atlan. Can be a stored procedure, udf, or tvf. 
""" - BIGQUERY_ROUTINE_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_ARGUMENTS: ClassVar[Any] = None - BIGQUERY_ROUTINE_RETURN_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_SECURITY_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_DDL: ClassVar[Any] = None + BIGQUERY_TYPE: ClassVar[Any] = None + BIGQUERY_ARGUMENTS: ClassVar[Any] = None + BIGQUERY_RETURN_TYPE: ClassVar[Any] = None + BIGQUERY_SECURITY_TYPE: ClassVar[Any] = None + BIGQUERY_DDL: ClassVar[Any] = None DEFINITION: ClassVar[Any] = None SQL_LANGUAGE: ClassVar[Any] = None SQL_RUNTIME_VERSION: ClassVar[Any] = None @@ -144,19 +143,21 @@ class BigqueryRoutine(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - bigquery_routine_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "BigqueryRoutine" + + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create the bigquery routine.""" definition: Union[str, None, UnsetType] = UNSET @@ -377,69 +378,6 @@ class BigqueryRoutine(Asset): def __post_init__(self) -> None: self.type_name = "BigqueryRoutine" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BigqueryRoutine instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.definition is UNSET: - errors.append("definition is required for creation") - if errors: - raise ValueError(f"BigqueryRoutine validation failed: {errors}") - - def minimize(self) -> "BigqueryRoutine": - """ - Return a minimal copy of this BigqueryRoutine with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BigqueryRoutine with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BigqueryRoutine instance with only the minimum required fields. - """ - self.validate() - return BigqueryRoutine(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBigqueryRoutine": - """ - Create a :class:`RelatedBigqueryRoutine` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedBigqueryRoutine reference to this asset. - """ - if self.guid is not UNSET: - return RelatedBigqueryRoutine(guid=self.guid) - return RelatedBigqueryRoutine(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -497,19 +435,19 @@ def from_json( class BigqueryRoutineAttributes(AssetAttributes): """BigqueryRoutine-specific attributes for nested API format.""" - bigquery_routine_type: Union[str, None, UnsetType] = UNSET + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create the bigquery routine.""" definition: Union[str, None, UnsetType] = UNSET @@ -798,11 +736,11 @@ def _populate_bigquery_routine_attrs( ) -> None: """Populate BigqueryRoutine-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.bigquery_routine_type = obj.bigquery_routine_type - attrs.bigquery_routine_arguments = obj.bigquery_routine_arguments - attrs.bigquery_routine_return_type = obj.bigquery_routine_return_type - attrs.bigquery_routine_security_type = obj.bigquery_routine_security_type - 
attrs.bigquery_routine_ddl = obj.bigquery_routine_ddl + attrs.bigquery_type = obj.bigquery_type + attrs.bigquery_arguments = obj.bigquery_arguments + attrs.bigquery_return_type = obj.bigquery_return_type + attrs.bigquery_security_type = obj.bigquery_security_type + attrs.bigquery_ddl = obj.bigquery_ddl attrs.definition = obj.definition attrs.sql_language = obj.sql_language attrs.sql_runtime_version = obj.sql_runtime_version @@ -838,11 +776,11 @@ def _populate_bigquery_routine_attrs( def _extract_bigquery_routine_attrs(attrs: BigqueryRoutineAttributes) -> dict: """Extract all BigqueryRoutine attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["bigquery_routine_type"] = attrs.bigquery_routine_type - result["bigquery_routine_arguments"] = attrs.bigquery_routine_arguments - result["bigquery_routine_return_type"] = attrs.bigquery_routine_return_type - result["bigquery_routine_security_type"] = attrs.bigquery_routine_security_type - result["bigquery_routine_ddl"] = attrs.bigquery_routine_ddl + result["bigquery_type"] = attrs.bigquery_type + result["bigquery_arguments"] = attrs.bigquery_arguments + result["bigquery_return_type"] = attrs.bigquery_return_type + result["bigquery_security_type"] = attrs.bigquery_security_type + result["bigquery_ddl"] = attrs.bigquery_ddl result["definition"] = attrs.definition result["sql_language"] = attrs.sql_language result["sql_runtime_version"] = attrs.sql_runtime_version @@ -915,9 +853,6 @@ def _bigquery_routine_to_nested( is_incomplete=bigquery_routine.is_incomplete, provenance_type=bigquery_routine.provenance_type, home_id=bigquery_routine.home_id, - depth=bigquery_routine.depth, - immediate_upstream=bigquery_routine.immediate_upstream, - immediate_downstream=bigquery_routine.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -951,6 +886,7 @@ def _bigquery_routine_from_nested(nested: BigqueryRoutineNested) -> 
BigqueryRout updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -959,9 +895,6 @@ def _bigquery_routine_from_nested(nested: BigqueryRoutineNested) -> BigqueryRout is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bigquery_routine_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -992,21 +925,17 @@ def _bigquery_routine_from_nested_bytes(data: bytes, serde: Serde) -> BigqueryRo RelationField, ) -BigqueryRoutine.BIGQUERY_ROUTINE_TYPE = KeywordField( - "bigqueryRoutineType", "bigqueryRoutineType" -) -BigqueryRoutine.BIGQUERY_ROUTINE_ARGUMENTS = KeywordField( - "bigqueryRoutineArguments", "bigqueryRoutineArguments" -) -BigqueryRoutine.BIGQUERY_ROUTINE_RETURN_TYPE = KeywordField( - "bigqueryRoutineReturnType", "bigqueryRoutineReturnType" +BigqueryRoutine.BIGQUERY_TYPE = KeywordField("bigqueryType", "bigqueryType") +BigqueryRoutine.BIGQUERY_ARGUMENTS = KeywordField( + "bigqueryArguments", "bigqueryArguments" ) -BigqueryRoutine.BIGQUERY_ROUTINE_SECURITY_TYPE = KeywordField( - "bigqueryRoutineSecurityType", "bigqueryRoutineSecurityType" +BigqueryRoutine.BIGQUERY_RETURN_TYPE = KeywordField( + "bigqueryReturnType", "bigqueryReturnType" ) -BigqueryRoutine.BIGQUERY_ROUTINE_DDL = KeywordField( - "bigqueryRoutineDdl", "bigqueryRoutineDdl" +BigqueryRoutine.BIGQUERY_SECURITY_TYPE = KeywordField( + "bigquerySecurityType", "bigquerySecurityType" ) +BigqueryRoutine.BIGQUERY_DDL = KeywordField("bigqueryDdl", "bigqueryDdl") BigqueryRoutine.DEFINITION = KeywordField("definition", "definition") BigqueryRoutine.SQL_LANGUAGE = KeywordTextField( "sqlLanguage", "sqlLanguage", 
"sqlLanguage.text" diff --git a/pyatlan_v9/model/assets/business_policy.py b/pyatlan_v9/model/assets/business_policy.py index 20b3c070b..fb831cfd0 100644 --- a/pyatlan_v9/model/assets/business_policy.py +++ b/pyatlan_v9/model/assets/business_policy.py @@ -92,6 +92,8 @@ class BusinessPolicy(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "BusinessPolicy" + business_policy_type: Union[str, None, UnsetType] = UNSET """Type of business policy""" @@ -199,66 +201,6 @@ class BusinessPolicy(Asset): def __post_init__(self) -> None: self.type_name = "BusinessPolicy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BusinessPolicy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BusinessPolicy validation failed: {errors}") - - def minimize(self) -> "BusinessPolicy": - """ - Return a minimal copy of this BusinessPolicy with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BusinessPolicy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BusinessPolicy instance with only the minimum required fields. - """ - self.validate() - return BusinessPolicy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBusinessPolicy": - """ - Create a :class:`RelatedBusinessPolicy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBusinessPolicy reference to this asset. - """ - if self.guid is not UNSET: - return RelatedBusinessPolicy(guid=self.guid) - return RelatedBusinessPolicy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -539,9 +481,6 @@ def _business_policy_to_nested(business_policy: BusinessPolicy) -> BusinessPolic is_incomplete=business_policy.is_incomplete, provenance_type=business_policy.provenance_type, home_id=business_policy.home_id, - depth=business_policy.depth, - immediate_upstream=business_policy.immediate_upstream, - immediate_downstream=business_policy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -575,6 +514,7 @@ def _business_policy_from_nested(nested: BusinessPolicyNested) -> BusinessPolicy updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -583,9 +523,6 @@ def 
_business_policy_from_nested(nested: BusinessPolicyNested) -> BusinessPolicy is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_business_policy_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/calculation_view.py b/pyatlan_v9/model/assets/calculation_view.py index 1a2266f6d..110955f20 100644 --- a/pyatlan_v9/model/assets/calculation_view.py +++ b/pyatlan_v9/model/assets/calculation_view.py @@ -58,7 +58,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedCalculationView, RelatedColumn, RelatedSchema +from .sql_related import RelatedColumn, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -72,10 +72,10 @@ class CalculationView(Asset): """ COLUMN_COUNT: ClassVar[Any] = None - CALCULATION_VIEW_VERSION_ID: ClassVar[Any] = None - CALCULATION_VIEW_ACTIVATED_BY: ClassVar[Any] = None - CALCULATION_VIEW_ACTIVATED_AT: ClassVar[Any] = None - CALCULATION_VIEW_PACKAGE_ID: ClassVar[Any] = None + SQL_VERSION_ID: ClassVar[Any] = None + SQL_ACTIVATED_BY: ClassVar[Any] = None + SQL_ACTIVATED_AT: ClassVar[Any] = None + SQL_PACKAGE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -132,19 +132,21 @@ class CalculationView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CalculationView" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, 
UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" query_count: Union[int, None, UnsetType] = UNSET @@ -337,80 +339,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CalculationView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"CalculationView validation failed: {errors}") - - def minimize(self) -> "CalculationView": - """ - Return a minimal copy of this CalculationView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CalculationView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CalculationView instance with only the minimum required fields. - """ - self.validate() - return CalculationView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCalculationView": - """ - Create a :class:`RelatedCalculationView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCalculationView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCalculationView(guid=self.guid) - return RelatedCalculationView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -471,16 +399,16 @@ class CalculationViewAttributes(AssetAttributes): column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" query_count: Union[int, None, UnsetType] = UNSET @@ -734,10 +662,10 @@ def _populate_calculation_view_attrs( """Populate CalculationView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.column_count = obj.column_count - attrs.calculation_view_version_id = obj.calculation_view_version_id - attrs.calculation_view_activated_by = obj.calculation_view_activated_by - attrs.calculation_view_activated_at = obj.calculation_view_activated_at - attrs.calculation_view_package_id = obj.calculation_view_package_id + attrs.sql_version_id = 
obj.sql_version_id + attrs.sql_activated_by = obj.sql_activated_by + attrs.sql_activated_at = obj.sql_activated_at + attrs.sql_package_id = obj.sql_package_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -762,10 +690,10 @@ def _extract_calculation_view_attrs(attrs: CalculationViewAttributes) -> dict: """Extract all CalculationView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["column_count"] = attrs.column_count - result["calculation_view_version_id"] = attrs.calculation_view_version_id - result["calculation_view_activated_by"] = attrs.calculation_view_activated_by - result["calculation_view_activated_at"] = attrs.calculation_view_activated_at - result["calculation_view_package_id"] = attrs.calculation_view_package_id + result["sql_version_id"] = attrs.sql_version_id + result["sql_activated_by"] = attrs.sql_activated_by + result["sql_activated_at"] = attrs.sql_activated_at + result["sql_package_id"] = attrs.sql_package_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -826,9 +754,6 @@ def _calculation_view_to_nested( is_incomplete=calculation_view.is_incomplete, provenance_type=calculation_view.provenance_type, home_id=calculation_view.home_id, - depth=calculation_view.depth, - immediate_upstream=calculation_view.immediate_upstream, - immediate_downstream=calculation_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -862,6 +787,7 @@ def _calculation_view_from_nested(nested: CalculationViewNested) -> CalculationV updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -870,9 +796,6 @@ def _calculation_view_from_nested(nested: CalculationViewNested) -> CalculationV is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_calculation_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -903,18 +826,10 @@ def _calculation_view_from_nested_bytes(data: bytes, serde: Serde) -> Calculatio ) CalculationView.COLUMN_COUNT = NumericField("columnCount", "columnCount") -CalculationView.CALCULATION_VIEW_VERSION_ID = NumericField( - "calculationViewVersionId", "calculationViewVersionId" -) -CalculationView.CALCULATION_VIEW_ACTIVATED_BY = KeywordField( - "calculationViewActivatedBy", "calculationViewActivatedBy" -) -CalculationView.CALCULATION_VIEW_ACTIVATED_AT = NumericField( - "calculationViewActivatedAt", "calculationViewActivatedAt" -) -CalculationView.CALCULATION_VIEW_PACKAGE_ID = KeywordField( - "calculationViewPackageId", "calculationViewPackageId" -) +CalculationView.SQL_VERSION_ID = NumericField("sqlVersionId", "sqlVersionId") +CalculationView.SQL_ACTIVATED_BY = KeywordField("sqlActivatedBy", "sqlActivatedBy") +CalculationView.SQL_ACTIVATED_AT = NumericField("sqlActivatedAt", "sqlActivatedAt") +CalculationView.SQL_PACKAGE_ID = KeywordField("sqlPackageId", "sqlPackageId") CalculationView.QUERY_COUNT = NumericField("queryCount", "queryCount") CalculationView.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") CalculationView.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") diff --git a/pyatlan_v9/model/assets/cassandra.py b/pyatlan_v9/model/assets/cassandra.py index 28857ddbd..64b62a4a9 100644 --- a/pyatlan_v9/model/assets/cassandra.py +++ b/pyatlan_v9/model/assets/cassandra.py @@ -38,7 +38,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import 
RelatedCassandra from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -98,6 +97,8 @@ class Cassandra(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cassandra" + cassandra_keyspace_name: Union[str, None, UnsetType] = UNSET """Name of the keyspace for the Cassandra asset.""" @@ -213,66 +214,6 @@ class Cassandra(Asset): def __post_init__(self) -> None: self.type_name = "Cassandra" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cassandra instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cassandra validation failed: {errors}") - - def minimize(self) -> "Cassandra": - """ - Return a minimal copy of this Cassandra with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cassandra with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cassandra instance with only the minimum required fields. - """ - self.validate() - return Cassandra(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandra": - """ - Create a :class:`RelatedCassandra` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandra reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCassandra(guid=self.guid) - return RelatedCassandra(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -551,9 +492,6 @@ def _cassandra_to_nested(cassandra: Cassandra) -> CassandraNested: is_incomplete=cassandra.is_incomplete, provenance_type=cassandra.provenance_type, home_id=cassandra.home_id, - depth=cassandra.depth, - immediate_upstream=cassandra.immediate_upstream, - immediate_downstream=cassandra.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -585,6 +523,7 @@ def _cassandra_from_nested(nested: CassandraNested) -> Cassandra: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -593,9 +532,6 @@ def _cassandra_from_nested(nested: CassandraNested) -> Cassandra: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cassandra_column.py b/pyatlan_v9/model/assets/cassandra_column.py index ac89d310f..db388a401 100644 --- a/pyatlan_v9/model/assets/cassandra_column.py +++ b/pyatlan_v9/model/assets/cassandra_column.py @@ -39,11 +39,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import ( - RelatedCassandraColumn, - RelatedCassandraTable, - RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraTable, RelatedCassandraView from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -112,6 +108,8 @@ class CassandraColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraColumn" + cassandra_column_clustering_order: Union[str, None, UnsetType] = UNSET """Clustering order of the CassandraColumn.""" @@ -262,78 +260,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_table is UNSET: - errors.append("cassandra_table is required for creation") - if self.cassandra_table_name is UNSET: - errors.append("cassandra_table_name is required for creation") - if self.cassandra_table_qualified_name is UNSET: - errors.append("cassandra_table_qualified_name is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraColumn validation failed: {errors}") - - def minimize(self) -> "CassandraColumn": - """ - Return a minimal copy of this CassandraColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraColumn instance with only the minimum required fields. - """ - self.validate() - return CassandraColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraColumn": - """ - Create a :class:`RelatedCassandraColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedCassandraColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCassandraColumn(guid=self.guid) - return RelatedCassandraColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -671,9 +597,6 @@ def _cassandra_column_to_nested( is_incomplete=cassandra_column.is_incomplete, provenance_type=cassandra_column.provenance_type, home_id=cassandra_column.home_id, - depth=cassandra_column.depth, - immediate_upstream=cassandra_column.immediate_upstream, - immediate_downstream=cassandra_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -707,6 +630,7 @@ def _cassandra_column_from_nested(nested: CassandraColumnNested) -> CassandraCol updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -715,9 +639,6 @@ def _cassandra_column_from_nested(nested: CassandraColumnNested) -> CassandraCol is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cassandra_index.py b/pyatlan_v9/model/assets/cassandra_index.py index 8d6f9f01f..fabda623c 100644 --- a/pyatlan_v9/model/assets/cassandra_index.py +++ b/pyatlan_v9/model/assets/cassandra_index.py @@ -39,7 +39,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import RelatedCassandraIndex, 
RelatedCassandraTable +from .cassandra_related import RelatedCassandraTable from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -103,6 +103,8 @@ class CassandraIndex(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraIndex" + cassandra_index_kind: Union[str, None, UnsetType] = UNSET """Kind of index (e.g. COMPOSITES).""" @@ -238,78 +240,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraIndex instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_table is UNSET: - errors.append("cassandra_table is required for creation") - if self.cassandra_table_name is UNSET: - errors.append("cassandra_table_name is required for creation") - if self.cassandra_table_qualified_name is UNSET: - errors.append("cassandra_table_qualified_name is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraIndex validation failed: {errors}") - - def minimize(self) -> "CassandraIndex": - """ - Return a minimal copy of this CassandraIndex with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraIndex with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraIndex instance with only the minimum required fields. - """ - self.validate() - return CassandraIndex(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraIndex": - """ - Create a :class:`RelatedCassandraIndex` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedCassandraIndex reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCassandraIndex(guid=self.guid) - return RelatedCassandraIndex(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -613,9 +543,6 @@ def _cassandra_index_to_nested(cassandra_index: CassandraIndex) -> CassandraInde is_incomplete=cassandra_index.is_incomplete, provenance_type=cassandra_index.provenance_type, home_id=cassandra_index.home_id, - depth=cassandra_index.depth, - immediate_upstream=cassandra_index.immediate_upstream, - immediate_downstream=cassandra_index.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -649,6 +576,7 @@ def _cassandra_index_from_nested(nested: CassandraIndexNested) -> CassandraIndex updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -657,9 +585,6 @@ def _cassandra_index_from_nested(nested: CassandraIndexNested) -> CassandraIndex is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_index_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cassandra_keyspace.py b/pyatlan_v9/model/assets/cassandra_keyspace.py index d948fd076..babcd9bd8 100644 --- a/pyatlan_v9/model/assets/cassandra_keyspace.py +++ b/pyatlan_v9/model/assets/cassandra_keyspace.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from 
.cassandra_related import ( - RelatedCassandraKeyspace, - RelatedCassandraTable, - RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraTable, RelatedCassandraView from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -108,6 +104,8 @@ class CassandraKeyspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraKeyspace" + cassandra_keyspace_durable_writes: Union[bool, None, UnsetType] = UNSET """Indicates whether durable writes are enabled for the CassandraKeyspace.""" @@ -241,66 +239,6 @@ class CassandraKeyspace(Asset): def __post_init__(self) -> None: self.type_name = "CassandraKeyspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraKeyspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CassandraKeyspace validation failed: {errors}") - - def minimize(self) -> "CassandraKeyspace": - """ - Return a minimal copy of this CassandraKeyspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraKeyspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraKeyspace instance with only the minimum required fields. - """ - self.validate() - return CassandraKeyspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraKeyspace": - """ - Create a :class:`RelatedCassandraKeyspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraKeyspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraKeyspace(guid=self.guid) - return RelatedCassandraKeyspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -619,9 +557,6 @@ def _cassandra_keyspace_to_nested( is_incomplete=cassandra_keyspace.is_incomplete, provenance_type=cassandra_keyspace.provenance_type, home_id=cassandra_keyspace.home_id, - depth=cassandra_keyspace.depth, - immediate_upstream=cassandra_keyspace.immediate_upstream, - immediate_downstream=cassandra_keyspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -657,6 +592,7 @@ def _cassandra_keyspace_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -665,9 +601,6 @@ def _cassandra_keyspace_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_keyspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cassandra_table.py b/pyatlan_v9/model/assets/cassandra_table.py index bf35d5d9c..2a82710f8 100644 --- a/pyatlan_v9/model/assets/cassandra_table.py +++ b/pyatlan_v9/model/assets/cassandra_table.py @@ -43,7 +43,6 @@ RelatedCassandraColumn, RelatedCassandraIndex, RelatedCassandraKeyspace, - RelatedCassandraTable, ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric @@ -125,6 +124,8 
@@ class CassandraTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraTable" + cassandra_table_bloom_filter_fp_chance: Union[float, None, UnsetType] = ( msgspec.field(default=UNSET, name="cassandraTableBloomFilterFPChance") ) @@ -319,74 +320,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_keyspace is UNSET: - errors.append("cassandra_keyspace is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraTable validation failed: {errors}") - - def minimize(self) -> "CassandraTable": - """ - Return a minimal copy of this CassandraTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraTable instance with only the minimum required fields. - """ - self.validate() - return CassandraTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraTable": - """ - Create a :class:`RelatedCassandraTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraTable(guid=self.guid) - return RelatedCassandraTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -803,9 +736,6 @@ def _cassandra_table_to_nested(cassandra_table: CassandraTable) -> CassandraTabl is_incomplete=cassandra_table.is_incomplete, provenance_type=cassandra_table.provenance_type, home_id=cassandra_table.home_id, - depth=cassandra_table.depth, - immediate_upstream=cassandra_table.immediate_upstream, - immediate_downstream=cassandra_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -839,6 +769,7 @@ def _cassandra_table_from_nested(nested: CassandraTableNested) -> CassandraTable updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -847,9 +778,6 @@ def _cassandra_table_from_nested(nested: CassandraTableNested) -> CassandraTable is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cassandra_view.py b/pyatlan_v9/model/assets/cassandra_view.py index 6a630feea..e914798c6 100644 --- a/pyatlan_v9/model/assets/cassandra_view.py +++ b/pyatlan_v9/model/assets/cassandra_view.py @@ -39,11 +39,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import ( - RelatedCassandraColumn, - RelatedCassandraKeyspace, 
- RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraColumn, RelatedCassandraKeyspace from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -122,6 +118,8 @@ class CassandraView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraView" + cassandra_view_table_id: Union[str, None, UnsetType] = UNSET """ID of the base table in the CassandraView.""" @@ -312,74 +310,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_keyspace is UNSET: - errors.append("cassandra_keyspace is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraView validation failed: {errors}") - - def minimize(self) -> "CassandraView": - """ - Return a minimal copy of this CassandraView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraView instance with only the minimum required fields. - """ - self.validate() - return CassandraView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraView": - """ - Create a :class:`RelatedCassandraView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraView(guid=self.guid) - return RelatedCassandraView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -787,9 +717,6 @@ def _cassandra_view_to_nested(cassandra_view: CassandraView) -> CassandraViewNes is_incomplete=cassandra_view.is_incomplete, provenance_type=cassandra_view.provenance_type, home_id=cassandra_view.home_id, - depth=cassandra_view.depth, - immediate_upstream=cassandra_view.immediate_upstream, - immediate_downstream=cassandra_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -823,6 +750,7 @@ def _cassandra_view_from_nested(nested: CassandraViewNested) -> CassandraView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -831,9 +759,6 @@ def _cassandra_view_from_nested(nested: CassandraViewNested) -> CassandraView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/catalog.py b/pyatlan_v9/model/assets/catalog.py index a5c8d6675..70442a3a4 100644 --- a/pyatlan_v9/model/assets/catalog.py +++ b/pyatlan_v9/model/assets/catalog.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedCatalog from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class Catalog(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Catalog" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Catalog(Asset): def __post_init__(self) -> None: self.type_name = "Catalog" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Catalog instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Catalog validation failed: {errors}") - - def minimize(self) -> "Catalog": - """ - Return a minimal copy of this Catalog with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Catalog with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Catalog instance with only the minimum required fields. - """ - self.validate() - return Catalog(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCatalog": - """ - Create a :class:`RelatedCatalog` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCatalog reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCatalog(guid=self.guid) - return RelatedCatalog(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _catalog_to_nested(catalog: Catalog) -> CatalogNested: is_incomplete=catalog.is_incomplete, provenance_type=catalog.provenance_type, home_id=catalog.home_id, - depth=catalog.depth, - immediate_upstream=catalog.immediate_upstream, - immediate_downstream=catalog.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _catalog_from_nested(nested: CatalogNested) -> Catalog: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _catalog_from_nested(nested: CatalogNested) -> Catalog: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_catalog_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/cloud.py b/pyatlan_v9/model/assets/cloud.py index 50f8b40ef..70458c214 100644 --- a/pyatlan_v9/model/assets/cloud.py +++ b/pyatlan_v9/model/assets/cloud.py @@ -36,7 +36,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedCloud from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -77,6 +76,8 @@ class Cloud(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cloud" + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET """Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" @@ -143,66 +144,6 @@ class Cloud(Asset): def __post_init__(self) -> None: self.type_name = "Cloud" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cloud instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cloud validation failed: {errors}") - - def minimize(self) -> "Cloud": - """ - Return a minimal copy of this Cloud with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cloud with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cloud instance with only the minimum required fields. - """ - self.validate() - return Cloud(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCloud": - """ - Create a :class:`RelatedCloud` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCloud reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCloud(guid=self.guid) - return RelatedCloud(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -412,9 +353,6 @@ def _cloud_to_nested(cloud: Cloud) -> CloudNested: is_incomplete=cloud.is_incomplete, provenance_type=cloud.provenance_type, home_id=cloud.home_id, - depth=cloud.depth, - immediate_upstream=cloud.immediate_upstream, - immediate_downstream=cloud.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -444,6 +382,7 @@ def _cloud_from_nested(nested: CloudNested) -> Cloud: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -452,9 +391,6 @@ def _cloud_from_nested(nested: CloudNested) -> Cloud: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cloud_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cloud_related.py b/pyatlan_v9/model/assets/cloud_related.py index 957e6c0e1..ab23076ed 100644 --- a/pyatlan_v9/model/assets/cloud_related.py +++ b/pyatlan_v9/model/assets/cloud_related.py @@ -132,7 +132,7 @@ class RelatedGoogle(RelatedCloud): google_project_id: Union[str, None, UnsetType] = UNSET """ID of the project in which the asset exists.""" - google_project_number: Union[int, None, UnsetType] = UNSET + cloud_project_number: Union[int, None, UnsetType] = UNSET """Number of the project in which the asset 
exists.""" google_location: Union[str, None, UnsetType] = UNSET diff --git a/pyatlan_v9/model/assets/cognite.py b/pyatlan_v9/model/assets/cognite.py index f7db21380..dfa99ccb5 100644 --- a/pyatlan_v9/model/assets/cognite.py +++ b/pyatlan_v9/model/assets/cognite.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCognite from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class Cognite(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cognite" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Cognite(Asset): def __post_init__(self) -> None: self.type_name = "Cognite" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognite instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cognite validation failed: {errors}") - - def minimize(self) -> "Cognite": - """ - Return a minimal copy of this Cognite with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognite with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognite instance with only the minimum required fields. - """ - self.validate() - return Cognite(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognite": - """ - Create a :class:`RelatedCognite` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognite reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognite(guid=self.guid) - return RelatedCognite(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _cognite_to_nested(cognite: Cognite) -> CogniteNested: is_incomplete=cognite.is_incomplete, provenance_type=cognite.provenance_type, home_id=cognite.home_id, - depth=cognite.depth, - immediate_upstream=cognite.immediate_upstream, - immediate_downstream=cognite.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _cognite_from_nested(nested: CogniteNested) -> Cognite: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _cognite_from_nested(nested: CogniteNested) -> Cognite: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite3d_model.py b/pyatlan_v9/model/assets/cognite3d_model.py index 639beac00..8a330c2c8 100644 --- a/pyatlan_v9/model/assets/cognite3d_model.py +++ b/pyatlan_v9/model/assets/cognite3d_model.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCognite3DModel, RelatedCogniteAsset +from .cognite_related import RelatedCogniteAsset from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +93,8 @@ class Cognite3DModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cognite3DModel" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -197,72 +199,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognite3DModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"Cognite3DModel validation failed: {errors}") - - def minimize(self) -> "Cognite3DModel": - """ - Return a minimal copy of this Cognite3DModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognite3DModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognite3DModel instance with only the minimum required fields. - """ - self.validate() - return Cognite3DModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognite3DModel": - """ - Create a :class:`RelatedCognite3DModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognite3DModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognite3DModel(guid=self.guid) - return RelatedCognite3DModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -520,9 +456,6 @@ def _cognite3d_model_to_nested(cognite3d_model: Cognite3DModel) -> Cognite3DMode is_incomplete=cognite3d_model.is_incomplete, provenance_type=cognite3d_model.provenance_type, home_id=cognite3d_model.home_id, - depth=cognite3d_model.depth, - immediate_upstream=cognite3d_model.immediate_upstream, - immediate_downstream=cognite3d_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -556,6 +489,7 @@ def _cognite3d_model_from_nested(nested: Cognite3DModelNested) -> Cognite3DModel updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -564,9 +498,6 @@ def _cognite3d_model_from_nested(nested: Cognite3DModelNested) -> Cognite3DModel is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite3d_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite_asset.py b/pyatlan_v9/model/assets/cognite_asset.py index 92cbc28d5..a5b8769d3 100644 --- a/pyatlan_v9/model/assets/cognite_asset.py +++ b/pyatlan_v9/model/assets/cognite_asset.py @@ -39,7 +39,6 @@ ) from .cognite_related import ( RelatedCognite3DModel, - RelatedCogniteAsset, RelatedCogniteEvent, RelatedCogniteFile, 
RelatedCogniteSequence, @@ -103,6 +102,8 @@ class CogniteAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteAsset" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -213,66 +214,6 @@ class CogniteAsset(Asset): def __post_init__(self) -> None: self.type_name = "CogniteAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CogniteAsset validation failed: {errors}") - - def minimize(self) -> "CogniteAsset": - """ - Return a minimal copy of this CogniteAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new CogniteAsset instance with only the minimum required fields. - """ - self.validate() - return CogniteAsset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteAsset": - """ - Create a :class:`RelatedCogniteAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteAsset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCogniteAsset(guid=self.guid) - return RelatedCogniteAsset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -544,9 +485,6 @@ def _cognite_asset_to_nested(cognite_asset: CogniteAsset) -> CogniteAssetNested: is_incomplete=cognite_asset.is_incomplete, provenance_type=cognite_asset.provenance_type, home_id=cognite_asset.home_id, - depth=cognite_asset.depth, - immediate_upstream=cognite_asset.immediate_upstream, - immediate_downstream=cognite_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -580,6 +518,7 @@ def _cognite_asset_from_nested(nested: CogniteAssetNested) -> CogniteAsset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -588,9 +527,6 @@ def _cognite_asset_from_nested(nested: CogniteAssetNested) -> CogniteAsset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_cognite_asset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite_event.py b/pyatlan_v9/model/assets/cognite_event.py index 42c4050eb..8dbd9c7a8 100644 --- a/pyatlan_v9/model/assets/cognite_event.py +++ b/pyatlan_v9/model/assets/cognite_event.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteEvent +from .cognite_related import RelatedCogniteAsset from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +93,8 @@ class CogniteEvent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteEvent" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -197,72 +199,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteEvent instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteEvent validation failed: {errors}") - - def minimize(self) -> "CogniteEvent": - """ - Return a minimal copy of this CogniteEvent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteEvent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteEvent instance with only the minimum required fields. - """ - self.validate() - return CogniteEvent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteEvent": - """ - Create a :class:`RelatedCogniteEvent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteEvent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteEvent(guid=self.guid) - return RelatedCogniteEvent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -518,9 +454,6 @@ def _cognite_event_to_nested(cognite_event: CogniteEvent) -> CogniteEventNested: is_incomplete=cognite_event.is_incomplete, provenance_type=cognite_event.provenance_type, home_id=cognite_event.home_id, - depth=cognite_event.depth, - immediate_upstream=cognite_event.immediate_upstream, - immediate_downstream=cognite_event.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -554,6 +487,7 @@ def _cognite_event_from_nested(nested: CogniteEventNested) -> CogniteEvent: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -562,9 +496,6 @@ def _cognite_event_from_nested(nested: CogniteEventNested) -> CogniteEvent: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_event_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite_file.py b/pyatlan_v9/model/assets/cognite_file.py index 2cdfd0d00..4f20abfa3 100644 --- a/pyatlan_v9/model/assets/cognite_file.py +++ b/pyatlan_v9/model/assets/cognite_file.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteFile +from .cognite_related import RelatedCogniteAsset from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +93,8 @@ class CogniteFile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteFile" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -197,72 +199,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteFile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteFile validation failed: {errors}") - - def minimize(self) -> "CogniteFile": - """ - Return a minimal copy of this CogniteFile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteFile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteFile instance with only the minimum required fields. - """ - self.validate() - return CogniteFile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteFile": - """ - Create a :class:`RelatedCogniteFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteFile(guid=self.guid) - return RelatedCogniteFile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -516,9 +452,6 @@ def _cognite_file_to_nested(cognite_file: CogniteFile) -> CogniteFileNested: is_incomplete=cognite_file.is_incomplete, provenance_type=cognite_file.provenance_type, home_id=cognite_file.home_id, - depth=cognite_file.depth, - immediate_upstream=cognite_file.immediate_upstream, - immediate_downstream=cognite_file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +483,7 @@ def _cognite_file_from_nested(nested: CogniteFileNested) -> CogniteFile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +492,6 @@ def _cognite_file_from_nested(nested: CogniteFileNested) -> CogniteFile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_file_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite_sequence.py b/pyatlan_v9/model/assets/cognite_sequence.py index f0875afa8..d7c4360da 100644 --- a/pyatlan_v9/model/assets/cognite_sequence.py +++ b/pyatlan_v9/model/assets/cognite_sequence.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteSequence +from .cognite_related import RelatedCogniteAsset 
from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +93,8 @@ class CogniteSequence(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteSequence" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -197,72 +199,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteSequence instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteSequence validation failed: {errors}") - - def minimize(self) -> "CogniteSequence": - """ - Return a minimal copy of this CogniteSequence with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteSequence with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteSequence instance with only the minimum required fields. - """ - self.validate() - return CogniteSequence(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteSequence": - """ - Create a :class:`RelatedCogniteSequence` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteSequence reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteSequence(guid=self.guid) - return RelatedCogniteSequence(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -524,9 +460,6 @@ def _cognite_sequence_to_nested( is_incomplete=cognite_sequence.is_incomplete, provenance_type=cognite_sequence.provenance_type, home_id=cognite_sequence.home_id, - depth=cognite_sequence.depth, - immediate_upstream=cognite_sequence.immediate_upstream, - immediate_downstream=cognite_sequence.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -560,6 +493,7 @@ def _cognite_sequence_from_nested(nested: CogniteSequenceNested) -> CogniteSeque updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -568,9 +502,6 @@ def _cognite_sequence_from_nested(nested: CogniteSequenceNested) -> CogniteSeque is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_sequence_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognite_time_series.py b/pyatlan_v9/model/assets/cognite_time_series.py index d456a27ed..d54f14d22 100644 --- a/pyatlan_v9/model/assets/cognite_time_series.py +++ b/pyatlan_v9/model/assets/cognite_time_series.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteTimeSeries +from .cognite_related import 
RelatedCogniteAsset from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +93,8 @@ class CogniteTimeSeries(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteTimeSeries" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -197,72 +199,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteTimeSeries instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteTimeSeries validation failed: {errors}") - - def minimize(self) -> "CogniteTimeSeries": - """ - Return a minimal copy of this CogniteTimeSeries with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteTimeSeries with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteTimeSeries instance with only the minimum required fields. - """ - self.validate() - return CogniteTimeSeries(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteTimeSeries": - """ - Create a :class:`RelatedCogniteTimeSeries` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteTimeSeries reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteTimeSeries(guid=self.guid) - return RelatedCogniteTimeSeries(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -524,9 +460,6 @@ def _cognite_time_series_to_nested( is_incomplete=cognite_time_series.is_incomplete, provenance_type=cognite_time_series.provenance_type, home_id=cognite_time_series.home_id, - depth=cognite_time_series.depth, - immediate_upstream=cognite_time_series.immediate_upstream, - immediate_downstream=cognite_time_series.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -562,6 +495,7 @@ def _cognite_time_series_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -570,9 +504,6 @@ def _cognite_time_series_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_time_series_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos.py b/pyatlan_v9/model/assets/cognos.py index eecf8a1e4..c0ca7ed57 100644 --- a/pyatlan_v9/model/assets/cognos.py +++ b/pyatlan_v9/model/assets/cognos.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognos from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ 
-100,6 +99,8 @@ class Cognos(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cognos" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -222,66 +223,6 @@ class Cognos(Asset): def __post_init__(self) -> None: self.type_name = "Cognos" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognos instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cognos validation failed: {errors}") - - def minimize(self) -> "Cognos": - """ - Return a minimal copy of this Cognos with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognos with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognos instance with only the minimum required fields. 
- """ - self.validate() - return Cognos(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognos": - """ - Create a :class:`RelatedCognos` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognos reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCognos(guid=self.guid) - return RelatedCognos(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -573,9 +514,6 @@ def _cognos_to_nested(cognos: Cognos) -> CognosNested: is_incomplete=cognos.is_incomplete, provenance_type=cognos.provenance_type, home_id=cognos.home_id, - depth=cognos.depth, - immediate_upstream=cognos.immediate_upstream, - immediate_downstream=cognos.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -605,6 +543,7 @@ def _cognos_from_nested(nested: CognosNested) -> Cognos: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -613,9 +552,6 @@ def _cognos_from_nested(nested: CognosNested) -> Cognos: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_column.py b/pyatlan_v9/model/assets/cognos_column.py index 
ff96cebc6..d51ee09aa 100644 --- a/pyatlan_v9/model/assets/cognos_column.py +++ b/pyatlan_v9/model/assets/cognos_column.py @@ -39,7 +39,6 @@ _populate_asset_attrs, ) from .cognos_related import ( - RelatedCognosColumn, RelatedCognosDashboard, RelatedCognosDataset, RelatedCognosExploration, @@ -71,9 +70,9 @@ class CognosColumn(Asset): Instance of a Cognos column in Atlan. """ - COGNOS_COLUMN_DATATYPE: ClassVar[Any] = None - COGNOS_COLUMN_NULLABLE: ClassVar[Any] = None - COGNOS_COLUMN_REGULAR_AGGREGATE: ClassVar[Any] = None + COGNOS_DATATYPE: ClassVar[Any] = None + COGNOS_NULLABLE: ClassVar[Any] = None + COGNOS_REGULAR_AGGREGATE: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -118,13 +117,15 @@ class CognosColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosColumn" + + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -275,72 +276,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_dataset is UNSET: - errors.append("cognos_dataset is required for creation") - if errors: - raise ValueError(f"CognosColumn validation failed: {errors}") - - def minimize(self) -> "CognosColumn": - """ - Return a minimal copy of this CognosColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosColumn instance with only the minimum required fields. - """ - self.validate() - return CognosColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosColumn": - """ - Create a :class:`RelatedCognosColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosColumn(guid=self.guid) - return RelatedCognosColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -396,13 +331,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> CognosColum class CognosColumnAttributes(AssetAttributes): """CognosColumn-specific attributes for nested API format.""" - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -610,9 +545,9 @@ def _populate_cognos_column_attrs( ) -> None: """Populate CognosColumn-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.cognos_column_datatype = obj.cognos_column_datatype - attrs.cognos_column_nullable = obj.cognos_column_nullable - attrs.cognos_column_regular_aggregate = obj.cognos_column_regular_aggregate + attrs.cognos_datatype = obj.cognos_datatype + attrs.cognos_nullable = obj.cognos_nullable + attrs.cognos_regular_aggregate = obj.cognos_regular_aggregate attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -627,9 +562,9 @@ def _populate_cognos_column_attrs( def _extract_cognos_column_attrs(attrs: CognosColumnAttributes) -> dict: """Extract all CognosColumn attributes from the attrs struct into 
a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_column_datatype"] = attrs.cognos_column_datatype - result["cognos_column_nullable"] = attrs.cognos_column_nullable - result["cognos_column_regular_aggregate"] = attrs.cognos_column_regular_aggregate + result["cognos_datatype"] = attrs.cognos_datatype + result["cognos_nullable"] = attrs.cognos_nullable + result["cognos_regular_aggregate"] = attrs.cognos_regular_aggregate result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ -675,9 +610,6 @@ def _cognos_column_to_nested(cognos_column: CognosColumn) -> CognosColumnNested: is_incomplete=cognos_column.is_incomplete, provenance_type=cognos_column.provenance_type, home_id=cognos_column.home_id, - depth=cognos_column.depth, - immediate_upstream=cognos_column.immediate_upstream, - immediate_downstream=cognos_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -711,6 +643,7 @@ def _cognos_column_from_nested(nested: CognosColumnNested) -> CognosColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -719,9 +652,6 @@ def _cognos_column_from_nested(nested: CognosColumnNested) -> CognosColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -749,14 +679,10 @@ def _cognos_column_from_nested_bytes(data: bytes, serde: Serde) -> CognosColumn: RelationField, ) -CognosColumn.COGNOS_COLUMN_DATATYPE = KeywordField( - 
"cognosColumnDatatype", "cognosColumnDatatype" -) -CognosColumn.COGNOS_COLUMN_NULLABLE = KeywordField( - "cognosColumnNullable", "cognosColumnNullable" -) -CognosColumn.COGNOS_COLUMN_REGULAR_AGGREGATE = KeywordField( - "cognosColumnRegularAggregate", "cognosColumnRegularAggregate" +CognosColumn.COGNOS_DATATYPE = KeywordField("cognosDatatype", "cognosDatatype") +CognosColumn.COGNOS_NULLABLE = KeywordField("cognosNullable", "cognosNullable") +CognosColumn.COGNOS_REGULAR_AGGREGATE = KeywordField( + "cognosRegularAggregate", "cognosRegularAggregate" ) CognosColumn.COGNOS_ID = KeywordField("cognosId", "cognosId") CognosColumn.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") diff --git a/pyatlan_v9/model/assets/cognos_dashboard.py b/pyatlan_v9/model/assets/cognos_dashboard.py index 5a21bbff4..234b9172a 100644 --- a/pyatlan_v9/model/assets/cognos_dashboard.py +++ b/pyatlan_v9/model/assets/cognos_dashboard.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosDashboard, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -107,6 +103,8 @@ class CognosDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosDashboard" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -243,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosDashboard validation failed: {errors}") - - def minimize(self) -> "CognosDashboard": - """ - Return a minimal copy of this CognosDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDashboard instance with only the minimum required fields. - """ - self.validate() - return CognosDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDashboard": - """ - Create a :class:`RelatedCognosDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCognosDashboard(guid=self.guid) - return RelatedCognosDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -618,9 +550,6 @@ def _cognos_dashboard_to_nested( is_incomplete=cognos_dashboard.is_incomplete, provenance_type=cognos_dashboard.provenance_type, home_id=cognos_dashboard.home_id, - depth=cognos_dashboard.depth, - immediate_upstream=cognos_dashboard.immediate_upstream, - immediate_downstream=cognos_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -654,6 +583,7 @@ def _cognos_dashboard_from_nested(nested: CognosDashboardNested) -> CognosDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -662,9 +592,6 @@ def _cognos_dashboard_from_nested(nested: CognosDashboardNested) -> CognosDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_dataset.py b/pyatlan_v9/model/assets/cognos_dataset.py index 2ce452d17..e93ea8b33 100644 --- a/pyatlan_v9/model/assets/cognos_dataset.py +++ b/pyatlan_v9/model/assets/cognos_dataset.py @@ -38,11 +38,7 @@ _extract_asset_attrs, 
_populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosDataset, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -107,6 +103,8 @@ class CognosDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosDataset" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -243,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosDataset validation failed: {errors}") - - def minimize(self) -> "CognosDataset": - """ - Return a minimal copy of this CognosDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDataset instance with only the minimum required fields. - """ - self.validate() - return CognosDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDataset": - """ - Create a :class:`RelatedCognosDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosDataset(guid=self.guid) - return RelatedCognosDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -612,9 +544,6 @@ def _cognos_dataset_to_nested(cognos_dataset: CognosDataset) -> CognosDatasetNes is_incomplete=cognos_dataset.is_incomplete, provenance_type=cognos_dataset.provenance_type, home_id=cognos_dataset.home_id, - depth=cognos_dataset.depth, - immediate_upstream=cognos_dataset.immediate_upstream, - immediate_downstream=cognos_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -648,6 +577,7 @@ def _cognos_dataset_from_nested(nested: CognosDatasetNested) -> CognosDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -656,9 +586,6 @@ def _cognos_dataset_from_nested(nested: CognosDatasetNested) -> CognosDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_datasource.py b/pyatlan_v9/model/assets/cognos_datasource.py index 0a76f6388..f852c7f11 100644 --- a/pyatlan_v9/model/assets/cognos_datasource.py +++ b/pyatlan_v9/model/assets/cognos_datasource.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosDatasource from .data_mesh_related import 
RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -62,7 +61,7 @@ class CognosDatasource(Asset): Instance of a Cognos datasource in Atlan. """ - COGNOS_DATASOURCE_CONNECTION_STRING: ClassVar[Any] = None + COGNOS_CONNECTION_STRING: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -101,7 +100,9 @@ class CognosDatasource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosDatasource" + + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -226,66 +227,6 @@ class CognosDatasource(Asset): def __post_init__(self) -> None: self.type_name = "CognosDatasource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CognosDatasource validation failed: {errors}") - - def minimize(self) -> "CognosDatasource": - """ - Return a minimal copy of this CognosDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDatasource instance with only the minimum required fields. - """ - self.validate() - return CognosDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDatasource": - """ - Create a :class:`RelatedCognosDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosDatasource(guid=self.guid) - return RelatedCognosDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -343,7 +284,7 @@ def from_json( class CognosDatasourceAttributes(AssetAttributes): """CognosDatasource-specific attributes for nested API format.""" - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -527,7 +468,7 @@ def _populate_cognos_datasource_attrs( ) -> None: """Populate CognosDatasource-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.cognos_datasource_connection_string = obj.cognos_datasource_connection_string + attrs.cognos_connection_string = obj.cognos_connection_string attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -542,9 +483,7 @@ def _populate_cognos_datasource_attrs( def _extract_cognos_datasource_attrs(attrs: CognosDatasourceAttributes) -> dict: """Extract all CognosDatasource attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_datasource_connection_string"] = ( - attrs.cognos_datasource_connection_string - ) + result["cognos_connection_string"] = attrs.cognos_connection_string result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ -594,9 +533,6 @@ def _cognos_datasource_to_nested( is_incomplete=cognos_datasource.is_incomplete, provenance_type=cognos_datasource.provenance_type, home_id=cognos_datasource.home_id, - depth=cognos_datasource.depth, - 
immediate_upstream=cognos_datasource.immediate_upstream, - immediate_downstream=cognos_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -630,6 +566,7 @@ def _cognos_datasource_from_nested(nested: CognosDatasourceNested) -> CognosData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -638,9 +575,6 @@ def _cognos_datasource_from_nested(nested: CognosDatasourceNested) -> CognosData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -670,8 +604,8 @@ def _cognos_datasource_from_nested_bytes(data: bytes, serde: Serde) -> CognosDat RelationField, ) -CognosDatasource.COGNOS_DATASOURCE_CONNECTION_STRING = KeywordField( - "cognosDatasourceConnectionString", "cognosDatasourceConnectionString" +CognosDatasource.COGNOS_CONNECTION_STRING = KeywordField( + "cognosConnectionString", "cognosConnectionString" ) CognosDatasource.COGNOS_ID = KeywordField("cognosId", "cognosId") CognosDatasource.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") diff --git a/pyatlan_v9/model/assets/cognos_exploration.py b/pyatlan_v9/model/assets/cognos_exploration.py index a0fc16788..1327ff779 100644 --- a/pyatlan_v9/model/assets/cognos_exploration.py +++ b/pyatlan_v9/model/assets/cognos_exploration.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosExploration, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder 
from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -107,6 +103,8 @@ class CognosExploration(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosExploration" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -243,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosExploration instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosExploration validation failed: {errors}") - - def minimize(self) -> "CognosExploration": - """ - Return a minimal copy of this CognosExploration with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosExploration with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosExploration instance with only the minimum required fields. - """ - self.validate() - return CognosExploration(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosExploration": - """ - Create a :class:`RelatedCognosExploration` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosExploration reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosExploration(guid=self.guid) - return RelatedCognosExploration(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -618,9 +550,6 @@ def _cognos_exploration_to_nested( is_incomplete=cognos_exploration.is_incomplete, provenance_type=cognos_exploration.provenance_type, home_id=cognos_exploration.home_id, - depth=cognos_exploration.depth, - immediate_upstream=cognos_exploration.immediate_upstream, - immediate_downstream=cognos_exploration.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +585,7 @@ def _cognos_exploration_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +594,6 @@ def _cognos_exploration_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_exploration_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_file.py b/pyatlan_v9/model/assets/cognos_file.py index 17c61c652..49195642e 100644 --- a/pyatlan_v9/model/assets/cognos_file.py +++ b/pyatlan_v9/model/assets/cognos_file.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosColumn, RelatedCognosFile, RelatedCognosFolder +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_mesh_related import 
RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -103,6 +103,8 @@ class CognosFile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosFile" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -239,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosFile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosFile validation failed: {errors}") - - def minimize(self) -> "CognosFile": - """ - Return a minimal copy of this CognosFile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosFile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosFile instance with only the minimum required fields. - """ - self.validate() - return CognosFile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosFile": - """ - Create a :class:`RelatedCognosFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosFile(guid=self.guid) - return RelatedCognosFile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -604,9 +540,6 @@ def _cognos_file_to_nested(cognos_file: CognosFile) -> CognosFileNested: is_incomplete=cognos_file.is_incomplete, provenance_type=cognos_file.provenance_type, home_id=cognos_file.home_id, - depth=cognos_file.depth, - immediate_upstream=cognos_file.immediate_upstream, - immediate_downstream=cognos_file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -638,6 +571,7 @@ def _cognos_file_from_nested(nested: CognosFileNested) -> CognosFile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -646,9 +580,6 @@ def _cognos_file_from_nested(nested: CognosFileNested) -> CognosFile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_file_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_folder.py b/pyatlan_v9/model/assets/cognos_folder.py index df4c368dc..ed7de40c1 100644 --- a/pyatlan_v9/model/assets/cognos_folder.py +++ b/pyatlan_v9/model/assets/cognos_folder.py @@ -72,8 +72,8 @@ class CognosFolder(Asset): Instance of a Cognos folder in Atlan. 
""" - COGNOS_FOLDER_SUB_FOLDER_COUNT: ClassVar[Any] = None - COGNOS_FOLDER_CHILD_OBJECTS_COUNT: ClassVar[Any] = None + COGNOS_SUB_FOLDER_COUNT: ClassVar[Any] = None + COGNOS_CHILD_OBJECTS_COUNT: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -121,10 +121,12 @@ class CognosFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosFolder" + + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -282,70 +284,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"CognosFolder validation failed: {errors}") - - def minimize(self) -> "CognosFolder": - """ - Return a minimal copy of this CognosFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosFolder instance with only the minimum required fields. - """ - self.validate() - return CognosFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosFolder": - """ - Create a :class:`RelatedCognosFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosFolder(guid=self.guid) - return RelatedCognosFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -401,10 +339,10 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> CognosFolde class CognosFolderAttributes(AssetAttributes): """CognosFolder-specific attributes for nested API format.""" - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -624,8 +562,8 @@ def _populate_cognos_folder_attrs( ) -> None: """Populate CognosFolder-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.cognos_folder_sub_folder_count = obj.cognos_folder_sub_folder_count - attrs.cognos_folder_child_objects_count = obj.cognos_folder_child_objects_count + attrs.cognos_sub_folder_count = obj.cognos_sub_folder_count + attrs.cognos_child_objects_count = obj.cognos_child_objects_count attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -640,10 +578,8 @@ def _populate_cognos_folder_attrs( def _extract_cognos_folder_attrs(attrs: CognosFolderAttributes) -> dict: """Extract all CognosFolder attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_folder_sub_folder_count"] = attrs.cognos_folder_sub_folder_count - result["cognos_folder_child_objects_count"] = ( - attrs.cognos_folder_child_objects_count - ) 
+ result["cognos_sub_folder_count"] = attrs.cognos_sub_folder_count + result["cognos_child_objects_count"] = attrs.cognos_child_objects_count result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ -689,9 +625,6 @@ def _cognos_folder_to_nested(cognos_folder: CognosFolder) -> CognosFolderNested: is_incomplete=cognos_folder.is_incomplete, provenance_type=cognos_folder.provenance_type, home_id=cognos_folder.home_id, - depth=cognos_folder.depth, - immediate_upstream=cognos_folder.immediate_upstream, - immediate_downstream=cognos_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -725,6 +658,7 @@ def _cognos_folder_from_nested(nested: CognosFolderNested) -> CognosFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -733,9 +667,6 @@ def _cognos_folder_from_nested(nested: CognosFolderNested) -> CognosFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -764,11 +695,11 @@ def _cognos_folder_from_nested_bytes(data: bytes, serde: Serde) -> CognosFolder: RelationField, ) -CognosFolder.COGNOS_FOLDER_SUB_FOLDER_COUNT = NumericField( - "cognosFolderSubFolderCount", "cognosFolderSubFolderCount" +CognosFolder.COGNOS_SUB_FOLDER_COUNT = NumericField( + "cognosSubFolderCount", "cognosSubFolderCount" ) -CognosFolder.COGNOS_FOLDER_CHILD_OBJECTS_COUNT = NumericField( - "cognosFolderChildObjectsCount", "cognosFolderChildObjectsCount" 
+CognosFolder.COGNOS_CHILD_OBJECTS_COUNT = NumericField( + "cognosChildObjectsCount", "cognosChildObjectsCount" ) CognosFolder.COGNOS_ID = KeywordField("cognosId", "cognosId") CognosFolder.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") diff --git a/pyatlan_v9/model/assets/cognos_module.py b/pyatlan_v9/model/assets/cognos_module.py index adc96ed47..814c7469d 100644 --- a/pyatlan_v9/model/assets/cognos_module.py +++ b/pyatlan_v9/model/assets/cognos_module.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosFolder, - RelatedCognosModule, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -107,6 +103,8 @@ class CognosModule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosModule" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -243,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosModule validation failed: {errors}") - - def minimize(self) -> "CognosModule": - """ - Return a minimal copy of this CognosModule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosModule instance with only the minimum required fields. - """ - self.validate() - return CognosModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosModule": - """ - Create a :class:`RelatedCognosModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosModule reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosModule(guid=self.guid) - return RelatedCognosModule(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -612,9 +544,6 @@ def _cognos_module_to_nested(cognos_module: CognosModule) -> CognosModuleNested: is_incomplete=cognos_module.is_incomplete, provenance_type=cognos_module.provenance_type, home_id=cognos_module.home_id, - depth=cognos_module.depth, - immediate_upstream=cognos_module.immediate_upstream, - immediate_downstream=cognos_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -648,6 +577,7 @@ def _cognos_module_from_nested(nested: CognosModuleNested) -> CognosModule: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -656,9 +586,6 @@ def _cognos_module_from_nested(nested: CognosModuleNested) -> CognosModule: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_module_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_package.py b/pyatlan_v9/model/assets/cognos_package.py index be5cf2974..33868324b 100644 --- a/pyatlan_v9/model/assets/cognos_package.py +++ b/pyatlan_v9/model/assets/cognos_package.py @@ -38,11 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosFolder, - RelatedCognosPackage, -) +from 
.cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -107,6 +103,8 @@ class CognosPackage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosPackage" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -243,72 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosPackage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosPackage validation failed: {errors}") - - def minimize(self) -> "CognosPackage": - """ - Return a minimal copy of this CognosPackage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosPackage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosPackage instance with only the minimum required fields. - """ - self.validate() - return CognosPackage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosPackage": - """ - Create a :class:`RelatedCognosPackage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosPackage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosPackage(guid=self.guid) - return RelatedCognosPackage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -612,9 +544,6 @@ def _cognos_package_to_nested(cognos_package: CognosPackage) -> CognosPackageNes is_incomplete=cognos_package.is_incomplete, provenance_type=cognos_package.provenance_type, home_id=cognos_package.home_id, - depth=cognos_package.depth, - immediate_upstream=cognos_package.immediate_upstream, - immediate_downstream=cognos_package.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -648,6 +577,7 @@ def _cognos_package_from_nested(nested: CognosPackageNested) -> CognosPackage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -656,9 +586,6 @@ def _cognos_package_from_nested(nested: CognosPackageNested) -> CognosPackage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_package_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cognos_related.py b/pyatlan_v9/model/assets/cognos_related.py index d3e3d5d2d..1f3936cea 100644 --- a/pyatlan_v9/model/assets/cognos_related.py +++ b/pyatlan_v9/model/assets/cognos_related.py @@ -100,7 +100,7 @@ class RelatedCognosDatasource(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosDatasource" 
so it serializes correctly - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" def __post_init__(self) -> None: @@ -148,10 +148,10 @@ class RelatedCognosFolder(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosFolder" so it serializes correctly - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" def __post_init__(self) -> None: @@ -214,13 +214,13 @@ class RelatedCognosColumn(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosColumn" so it serializes correctly - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/cognos_report.py b/pyatlan_v9/model/assets/cognos_report.py index 6104aaae4..fbf639551 100644 --- a/pyatlan_v9/model/assets/cognos_report.py +++ b/pyatlan_v9/model/assets/cognos_report.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosFolder, RelatedCognosReport +from .cognos_related import 
RelatedCognosFolder from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -102,6 +102,8 @@ class CognosReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosReport" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -235,72 +237,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosReport validation failed: {errors}") - - def minimize(self) -> "CognosReport": - """ - Return a minimal copy of this CognosReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosReport instance with only the minimum required fields. - """ - self.validate() - return CognosReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosReport": - """ - Create a :class:`RelatedCognosReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosReport(guid=self.guid) - return RelatedCognosReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -600,9 +536,6 @@ def _cognos_report_to_nested(cognos_report: CognosReport) -> CognosReportNested: is_incomplete=cognos_report.is_incomplete, provenance_type=cognos_report.provenance_type, home_id=cognos_report.home_id, - depth=cognos_report.depth, - immediate_upstream=cognos_report.immediate_upstream, - immediate_downstream=cognos_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -636,6 +569,7 @@ def _cognos_report_from_nested(nested: CognosReportNested) -> CognosReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -644,9 +578,6 @@ def _cognos_report_from_nested(nested: CognosReportNested) -> CognosReport: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/collection.py b/pyatlan_v9/model/assets/collection.py index f082eab7f..f55ee71ec 100644 --- a/pyatlan_v9/model/assets/collection.py +++ b/pyatlan_v9/model/assets/collection.py @@ -43,7 +43,7 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor -from .namespace_related import RelatedCollection, RelatedFolder +from .namespace_related import RelatedFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -84,6 +84,8 @@ class Collection(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Collection" + icon: Union[str, None, UnsetType] = UNSET """Image used to represent this collection.""" @@ -159,66 +161,6 @@ class Collection(Asset): def __post_init__(self) -> None: self.type_name = "Collection" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Collection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Collection validation failed: {errors}") - - def minimize(self) -> "Collection": - """ - Return a minimal copy of this Collection with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Collection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Collection instance with only the minimum required fields. - """ - self.validate() - return Collection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCollection": - """ - Create a :class:`RelatedCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCollection(guid=self.guid) - return RelatedCollection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, client: "AtlanClient", name: str) -> "Collection": @@ -461,9 +403,6 @@ def _collection_to_nested(collection: Collection) -> CollectionNested: is_incomplete=collection.is_incomplete, provenance_type=collection.provenance_type, home_id=collection.home_id, - depth=collection.depth, - immediate_upstream=collection.immediate_upstream, - immediate_downstream=collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -495,6 +434,7 @@ def _collection_from_nested(nested: CollectionNested) -> Collection: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -503,9 +443,6 @@ def _collection_from_nested(nested: CollectionNested) -> Collection: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/column.py b/pyatlan_v9/model/assets/column.py index 3e1eb2f8b..40afd37e2 100644 --- a/pyatlan_v9/model/assets/column.py +++ b/pyatlan_v9/model/assets/column.py @@ -92,8 +92,8 @@ class Column(Asset): DATA_TYPE: ClassVar[Any] = None SUB_DATA_TYPE: ClassVar[Any] = None - COLUMN_COMPRESSION: ClassVar[Any] = None - COLUMN_ENCODING: ClassVar[Any] = None + SQL_COMPRESSION: ClassVar[Any] = None + SQL_ENCODING: ClassVar[Any] = None RAW_DATA_TYPE_DEFINITION: ClassVar[Any] = None ORDER: ClassVar[Any] = None NESTED_COLUMN_ORDER: ClassVar[Any] = None @@ -118,46 +118,46 @@ class Column(Asset): VALIDATIONS: ClassVar[Any] = None PARENT_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None PARENT_COLUMN_NAME: ClassVar[Any] = None - COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None - COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_HISTOGRAM: ClassVar[Any] = None - COLUMN_MAX: ClassVar[Any] = None - COLUMN_MIN: ClassVar[Any] = None - COLUMN_MEAN: ClassVar[Any] = None - COLUMN_SUM: ClassVar[Any] = None - COLUMN_MEDIAN: ClassVar[Any] = None - COLUMN_STANDARD_DEVIATION: ClassVar[Any] = None - COLUMN_UNIQUE_VALUES_COUNT: ClassVar[Any] = None - COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_AVERAGE: ClassVar[Any] = None - COLUMN_AVERAGE_LENGTH: ClassVar[Any] = None - COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[Any] = None - COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[Any] = None + SQL_DISTINCT_VALUES_COUNT: ClassVar[Any] = None + SQL_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_HISTOGRAM: ClassVar[Any] = None + SQL_MAX: ClassVar[Any] = None + SQL_MIN: ClassVar[Any] = None + SQL_MEAN: ClassVar[Any] = None + SQL_SUM: ClassVar[Any] = None + SQL_MEDIAN: ClassVar[Any] = None + 
SQL_STANDARD_DEVIATION: ClassVar[Any] = None + SQL_UNIQUE_VALUES_COUNT: ClassVar[Any] = None + SQL_UNIQUE_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_AVERAGE: ClassVar[Any] = None + SQL_AVERAGE_LENGTH: ClassVar[Any] = None + SQL_DUPLICATE_VALUES_COUNT: ClassVar[Any] = None + SQL_DUPLICATE_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_MAXIMUM_STRING_LENGTH: ClassVar[Any] = None COLUMN_MAXS: ClassVar[Any] = None - COLUMN_MINIMUM_STRING_LENGTH: ClassVar[Any] = None + SQL_MINIMUM_STRING_LENGTH: ClassVar[Any] = None COLUMN_MINS: ClassVar[Any] = None - COLUMN_MISSING_VALUES_COUNT: ClassVar[Any] = None - COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[Any] = None - COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[Any] = None - COLUMN_VARIANCE: ClassVar[Any] = None + SQL_MISSING_VALUES_COUNT: ClassVar[Any] = None + SQL_MISSING_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_MISSING_VALUES_PERCENTAGE: ClassVar[Any] = None + SQL_UNIQUENESS_PERCENTAGE: ClassVar[Any] = None + SQL_VARIANCE: ClassVar[Any] = None COLUMN_TOP_VALUES: ClassVar[Any] = None - COLUMN_MAX_VALUE: ClassVar[Any] = None - COLUMN_MIN_VALUE: ClassVar[Any] = None - COLUMN_MEAN_VALUE: ClassVar[Any] = None - COLUMN_SUM_VALUE: ClassVar[Any] = None - COLUMN_MEDIAN_VALUE: ClassVar[Any] = None - COLUMN_STANDARD_DEVIATION_VALUE: ClassVar[Any] = None - COLUMN_AVERAGE_VALUE: ClassVar[Any] = None - COLUMN_VARIANCE_VALUE: ClassVar[Any] = None - COLUMN_AVERAGE_LENGTH_VALUE: ClassVar[Any] = None - COLUMN_DISTRIBUTION_HISTOGRAM: ClassVar[Any] = None - COLUMN_DEPTH_LEVEL: ClassVar[Any] = None + SQL_MAX_VALUE: ClassVar[Any] = None + SQL_MIN_VALUE: ClassVar[Any] = None + SQL_MEAN_VALUE: ClassVar[Any] = None + SQL_SUM_VALUE: ClassVar[Any] = None + SQL_MEDIAN_VALUE: ClassVar[Any] = None + SQL_STANDARD_DEVIATION_VALUE: ClassVar[Any] = None + SQL_AVERAGE_VALUE: ClassVar[Any] = None + SQL_VARIANCE_VALUE: ClassVar[Any] = None + SQL_AVERAGE_LENGTH_VALUE: ClassVar[Any] = None + 
SQL_DISTRIBUTION_HISTOGRAM: ClassVar[Any] = None + SQL_DEPTH_LEVEL: ClassVar[Any] = None NOSQL_COLLECTION_NAME: ClassVar[Any] = None NOSQL_COLLECTION_QUALIFIED_NAME: ClassVar[Any] = None - COLUMN_IS_MEASURE: ClassVar[Any] = None - COLUMN_MEASURE_TYPE: ClassVar[Any] = None + SQL_IS_MEASURE: ClassVar[Any] = None + SQL_MEASURE_TYPE: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -232,16 +232,18 @@ class Column(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Column" + data_type: Union[str, None, UnsetType] = UNSET """Data type of values in this column.""" sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -316,112 +318,112 @@ class Column(Asset): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, 
UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET 
"""Number of rows that contain duplicate values.""" - column_maximum_string_length: Union[int, None, UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: 
Union[float, None, UnsetType] = UNSET + sql_min_value: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] = UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -430,10 +432,10 @@ class Column(Asset): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name 
of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" query_count: Union[int, None, UnsetType] = UNSET @@ -690,99 +692,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Column instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if self.order is UNSET: - errors.append("order is required for creation") - if ( - self.table is UNSET - and self.table_partition is UNSET - and self.view is UNSET - and self.materialised_view is UNSET - ): - errors.append( - "one of table, table_partition, view, materialised_view is required for creation" - ) - if self.table is not UNSET or self.table_partition is not UNSET: - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.view is not UNSET or self.materialised_view is not UNSET: - if self.view_name is UNSET: - errors.append("view_name is required for creation") - if self.view_qualified_name is UNSET: - errors.append("view_qualified_name is required for creation") - if errors: - raise ValueError(f"Column validation failed: {errors}") - - def minimize(self) -> "Column": - """ - Return a minimal copy of this 
Column with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Column with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Column instance with only the minimum required fields. - """ - self.validate() - return Column(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedColumn": - """ - Create a :class:`RelatedColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedColumn(guid=self.guid) - return RelatedColumn(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -1040,10 +949,10 @@ class ColumnAttributes(AssetAttributes): sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -1118,112 +1027,112 @@ class ColumnAttributes(AssetAttributes): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct 
values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + 
sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_maximum_string_length: Union[int, None, UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: 
Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: Union[float, None, UnsetType] = UNSET + sql_min_value: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] 
= UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -1232,10 +1141,10 @@ class ColumnAttributes(AssetAttributes): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" query_count: Union[int, None, UnsetType] = UNSET @@ -1568,8 +1477,8 @@ def _populate_column_attrs(attrs: ColumnAttributes, obj: Column) -> None: _populate_asset_attrs(attrs, obj) attrs.data_type = obj.data_type attrs.sub_data_type = obj.sub_data_type - attrs.column_compression = obj.column_compression - attrs.column_encoding = obj.column_encoding + attrs.sql_compression = obj.sql_compression + attrs.sql_encoding = obj.sql_encoding attrs.raw_data_type_definition = obj.raw_data_type_definition attrs.order = obj.order attrs.nested_column_order = obj.nested_column_order @@ -1594,46 +1503,46 @@ def _populate_column_attrs(attrs: ColumnAttributes, obj: Column) -> None: attrs.validations = obj.validations attrs.parent_column_qualified_name = obj.parent_column_qualified_name attrs.parent_column_name = obj.parent_column_name - attrs.column_distinct_values_count = obj.column_distinct_values_count - attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long - attrs.column_histogram = obj.column_histogram - attrs.column_max = obj.column_max - attrs.column_min = obj.column_min - attrs.column_mean = obj.column_mean - attrs.column_sum = obj.column_sum - attrs.column_median 
= obj.column_median - attrs.column_standard_deviation = obj.column_standard_deviation - attrs.column_unique_values_count = obj.column_unique_values_count - attrs.column_unique_values_count_long = obj.column_unique_values_count_long - attrs.column_average = obj.column_average - attrs.column_average_length = obj.column_average_length - attrs.column_duplicate_values_count = obj.column_duplicate_values_count - attrs.column_duplicate_values_count_long = obj.column_duplicate_values_count_long - attrs.column_maximum_string_length = obj.column_maximum_string_length + attrs.sql_distinct_values_count = obj.sql_distinct_values_count + attrs.sql_distinct_values_count_long = obj.sql_distinct_values_count_long + attrs.sql_histogram = obj.sql_histogram + attrs.sql_max = obj.sql_max + attrs.sql_min = obj.sql_min + attrs.sql_mean = obj.sql_mean + attrs.sql_sum = obj.sql_sum + attrs.sql_median = obj.sql_median + attrs.sql_standard_deviation = obj.sql_standard_deviation + attrs.sql_unique_values_count = obj.sql_unique_values_count + attrs.sql_unique_values_count_long = obj.sql_unique_values_count_long + attrs.sql_average = obj.sql_average + attrs.sql_average_length = obj.sql_average_length + attrs.sql_duplicate_values_count = obj.sql_duplicate_values_count + attrs.sql_duplicate_values_count_long = obj.sql_duplicate_values_count_long + attrs.sql_maximum_string_length = obj.sql_maximum_string_length attrs.column_maxs = obj.column_maxs - attrs.column_minimum_string_length = obj.column_minimum_string_length + attrs.sql_minimum_string_length = obj.sql_minimum_string_length attrs.column_mins = obj.column_mins - attrs.column_missing_values_count = obj.column_missing_values_count - attrs.column_missing_values_count_long = obj.column_missing_values_count_long - attrs.column_missing_values_percentage = obj.column_missing_values_percentage - attrs.column_uniqueness_percentage = obj.column_uniqueness_percentage - attrs.column_variance = obj.column_variance + attrs.sql_missing_values_count = 
obj.sql_missing_values_count + attrs.sql_missing_values_count_long = obj.sql_missing_values_count_long + attrs.sql_missing_values_percentage = obj.sql_missing_values_percentage + attrs.sql_uniqueness_percentage = obj.sql_uniqueness_percentage + attrs.sql_variance = obj.sql_variance attrs.column_top_values = obj.column_top_values - attrs.column_max_value = obj.column_max_value - attrs.column_min_value = obj.column_min_value - attrs.column_mean_value = obj.column_mean_value - attrs.column_sum_value = obj.column_sum_value - attrs.column_median_value = obj.column_median_value - attrs.column_standard_deviation_value = obj.column_standard_deviation_value - attrs.column_average_value = obj.column_average_value - attrs.column_variance_value = obj.column_variance_value - attrs.column_average_length_value = obj.column_average_length_value - attrs.column_distribution_histogram = obj.column_distribution_histogram - attrs.column_depth_level = obj.column_depth_level + attrs.sql_max_value = obj.sql_max_value + attrs.sql_min_value = obj.sql_min_value + attrs.sql_mean_value = obj.sql_mean_value + attrs.sql_sum_value = obj.sql_sum_value + attrs.sql_median_value = obj.sql_median_value + attrs.sql_standard_deviation_value = obj.sql_standard_deviation_value + attrs.sql_average_value = obj.sql_average_value + attrs.sql_variance_value = obj.sql_variance_value + attrs.sql_average_length_value = obj.sql_average_length_value + attrs.sql_distribution_histogram = obj.sql_distribution_histogram + attrs.sql_depth_level = obj.sql_depth_level attrs.nosql_collection_name = obj.nosql_collection_name attrs.nosql_collection_qualified_name = obj.nosql_collection_qualified_name - attrs.column_is_measure = obj.column_is_measure - attrs.column_measure_type = obj.column_measure_type + attrs.sql_is_measure = obj.sql_is_measure + attrs.sql_measure_type = obj.sql_measure_type attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -1659,8 
+1568,8 @@ def _extract_column_attrs(attrs: ColumnAttributes) -> dict: result = _extract_asset_attrs(attrs) result["data_type"] = attrs.data_type result["sub_data_type"] = attrs.sub_data_type - result["column_compression"] = attrs.column_compression - result["column_encoding"] = attrs.column_encoding + result["sql_compression"] = attrs.sql_compression + result["sql_encoding"] = attrs.sql_encoding result["raw_data_type_definition"] = attrs.raw_data_type_definition result["order"] = attrs.order result["nested_column_order"] = attrs.nested_column_order @@ -1685,50 +1594,46 @@ def _extract_column_attrs(attrs: ColumnAttributes) -> dict: result["validations"] = attrs.validations result["parent_column_qualified_name"] = attrs.parent_column_qualified_name result["parent_column_name"] = attrs.parent_column_name - result["column_distinct_values_count"] = attrs.column_distinct_values_count - result["column_distinct_values_count_long"] = ( - attrs.column_distinct_values_count_long - ) - result["column_histogram"] = attrs.column_histogram - result["column_max"] = attrs.column_max - result["column_min"] = attrs.column_min - result["column_mean"] = attrs.column_mean - result["column_sum"] = attrs.column_sum - result["column_median"] = attrs.column_median - result["column_standard_deviation"] = attrs.column_standard_deviation - result["column_unique_values_count"] = attrs.column_unique_values_count - result["column_unique_values_count_long"] = attrs.column_unique_values_count_long - result["column_average"] = attrs.column_average - result["column_average_length"] = attrs.column_average_length - result["column_duplicate_values_count"] = attrs.column_duplicate_values_count - result["column_duplicate_values_count_long"] = ( - attrs.column_duplicate_values_count_long - ) - result["column_maximum_string_length"] = attrs.column_maximum_string_length + result["sql_distinct_values_count"] = attrs.sql_distinct_values_count + result["sql_distinct_values_count_long"] = 
attrs.sql_distinct_values_count_long + result["sql_histogram"] = attrs.sql_histogram + result["sql_max"] = attrs.sql_max + result["sql_min"] = attrs.sql_min + result["sql_mean"] = attrs.sql_mean + result["sql_sum"] = attrs.sql_sum + result["sql_median"] = attrs.sql_median + result["sql_standard_deviation"] = attrs.sql_standard_deviation + result["sql_unique_values_count"] = attrs.sql_unique_values_count + result["sql_unique_values_count_long"] = attrs.sql_unique_values_count_long + result["sql_average"] = attrs.sql_average + result["sql_average_length"] = attrs.sql_average_length + result["sql_duplicate_values_count"] = attrs.sql_duplicate_values_count + result["sql_duplicate_values_count_long"] = attrs.sql_duplicate_values_count_long + result["sql_maximum_string_length"] = attrs.sql_maximum_string_length result["column_maxs"] = attrs.column_maxs - result["column_minimum_string_length"] = attrs.column_minimum_string_length + result["sql_minimum_string_length"] = attrs.sql_minimum_string_length result["column_mins"] = attrs.column_mins - result["column_missing_values_count"] = attrs.column_missing_values_count - result["column_missing_values_count_long"] = attrs.column_missing_values_count_long - result["column_missing_values_percentage"] = attrs.column_missing_values_percentage - result["column_uniqueness_percentage"] = attrs.column_uniqueness_percentage - result["column_variance"] = attrs.column_variance + result["sql_missing_values_count"] = attrs.sql_missing_values_count + result["sql_missing_values_count_long"] = attrs.sql_missing_values_count_long + result["sql_missing_values_percentage"] = attrs.sql_missing_values_percentage + result["sql_uniqueness_percentage"] = attrs.sql_uniqueness_percentage + result["sql_variance"] = attrs.sql_variance result["column_top_values"] = attrs.column_top_values - result["column_max_value"] = attrs.column_max_value - result["column_min_value"] = attrs.column_min_value - result["column_mean_value"] = attrs.column_mean_value - 
result["column_sum_value"] = attrs.column_sum_value - result["column_median_value"] = attrs.column_median_value - result["column_standard_deviation_value"] = attrs.column_standard_deviation_value - result["column_average_value"] = attrs.column_average_value - result["column_variance_value"] = attrs.column_variance_value - result["column_average_length_value"] = attrs.column_average_length_value - result["column_distribution_histogram"] = attrs.column_distribution_histogram - result["column_depth_level"] = attrs.column_depth_level + result["sql_max_value"] = attrs.sql_max_value + result["sql_min_value"] = attrs.sql_min_value + result["sql_mean_value"] = attrs.sql_mean_value + result["sql_sum_value"] = attrs.sql_sum_value + result["sql_median_value"] = attrs.sql_median_value + result["sql_standard_deviation_value"] = attrs.sql_standard_deviation_value + result["sql_average_value"] = attrs.sql_average_value + result["sql_variance_value"] = attrs.sql_variance_value + result["sql_average_length_value"] = attrs.sql_average_length_value + result["sql_distribution_histogram"] = attrs.sql_distribution_histogram + result["sql_depth_level"] = attrs.sql_depth_level result["nosql_collection_name"] = attrs.nosql_collection_name result["nosql_collection_qualified_name"] = attrs.nosql_collection_qualified_name - result["column_is_measure"] = attrs.column_is_measure - result["column_measure_type"] = attrs.column_measure_type + result["sql_is_measure"] = attrs.sql_is_measure + result["sql_measure_type"] = attrs.sql_measure_type result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1785,9 +1690,6 @@ def _column_to_nested(column: Column) -> ColumnNested: is_incomplete=column.is_incomplete, provenance_type=column.provenance_type, home_id=column.home_id, - depth=column.depth, - immediate_upstream=column.immediate_upstream, - immediate_downstream=column.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1817,6 +1719,7 @@ def _column_from_nested(nested: ColumnNested) -> Column: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1825,9 +1728,6 @@ def _column_from_nested(nested: ColumnNested) -> Column: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1858,8 +1758,8 @@ def _column_from_nested_bytes(data: bytes, serde: Serde) -> Column: Column.DATA_TYPE = KeywordTextField("dataType", "dataType", "dataType.text") Column.SUB_DATA_TYPE = KeywordField("subDataType", "subDataType") -Column.COLUMN_COMPRESSION = KeywordField("columnCompression", "columnCompression") -Column.COLUMN_ENCODING = KeywordField("columnEncoding", "columnEncoding") +Column.SQL_COMPRESSION = KeywordField("sqlCompression", "sqlCompression") +Column.SQL_ENCODING = KeywordField("sqlEncoding", "sqlEncoding") Column.RAW_DATA_TYPE_DEFINITION = KeywordField( "rawDataTypeDefinition", "rawDataTypeDefinition" ) @@ -1892,86 +1792,82 @@ def _column_from_nested_bytes(data: bytes, serde: Serde) -> Column: "parentColumnQualifiedName.text", ) Column.PARENT_COLUMN_NAME = KeywordField("parentColumnName", "parentColumnName") -Column.COLUMN_DISTINCT_VALUES_COUNT = NumericField( - "columnDistinctValuesCount", "columnDistinctValuesCount" -) -Column.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( - "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" +Column.SQL_DISTINCT_VALUES_COUNT = NumericField( + "sqlDistinctValuesCount", "sqlDistinctValuesCount" ) 
-Column.COLUMN_HISTOGRAM = KeywordField("columnHistogram", "columnHistogram") -Column.COLUMN_MAX = NumericField("columnMax", "columnMax") -Column.COLUMN_MIN = NumericField("columnMin", "columnMin") -Column.COLUMN_MEAN = NumericField("columnMean", "columnMean") -Column.COLUMN_SUM = NumericField("columnSum", "columnSum") -Column.COLUMN_MEDIAN = NumericField("columnMedian", "columnMedian") -Column.COLUMN_STANDARD_DEVIATION = NumericField( - "columnStandardDeviation", "columnStandardDeviation" +Column.SQL_DISTINCT_VALUES_COUNT_LONG = NumericField( + "sqlDistinctValuesCountLong", "sqlDistinctValuesCountLong" ) -Column.COLUMN_UNIQUE_VALUES_COUNT = NumericField( - "columnUniqueValuesCount", "columnUniqueValuesCount" +Column.SQL_HISTOGRAM = KeywordField("sqlHistogram", "sqlHistogram") +Column.SQL_MAX = NumericField("sqlMax", "sqlMax") +Column.SQL_MIN = NumericField("sqlMin", "sqlMin") +Column.SQL_MEAN = NumericField("sqlMean", "sqlMean") +Column.SQL_SUM = NumericField("sqlSum", "sqlSum") +Column.SQL_MEDIAN = NumericField("sqlMedian", "sqlMedian") +Column.SQL_STANDARD_DEVIATION = NumericField( + "sqlStandardDeviation", "sqlStandardDeviation" ) -Column.COLUMN_UNIQUE_VALUES_COUNT_LONG = NumericField( - "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" +Column.SQL_UNIQUE_VALUES_COUNT = NumericField( + "sqlUniqueValuesCount", "sqlUniqueValuesCount" ) -Column.COLUMN_AVERAGE = NumericField("columnAverage", "columnAverage") -Column.COLUMN_AVERAGE_LENGTH = NumericField( - "columnAverageLength", "columnAverageLength" +Column.SQL_UNIQUE_VALUES_COUNT_LONG = NumericField( + "sqlUniqueValuesCountLong", "sqlUniqueValuesCountLong" ) -Column.COLUMN_DUPLICATE_VALUES_COUNT = NumericField( - "columnDuplicateValuesCount", "columnDuplicateValuesCount" +Column.SQL_AVERAGE = NumericField("sqlAverage", "sqlAverage") +Column.SQL_AVERAGE_LENGTH = NumericField("sqlAverageLength", "sqlAverageLength") +Column.SQL_DUPLICATE_VALUES_COUNT = NumericField( + "sqlDuplicateValuesCount", 
"sqlDuplicateValuesCount" ) -Column.COLUMN_DUPLICATE_VALUES_COUNT_LONG = NumericField( - "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" +Column.SQL_DUPLICATE_VALUES_COUNT_LONG = NumericField( + "sqlDuplicateValuesCountLong", "sqlDuplicateValuesCountLong" ) -Column.COLUMN_MAXIMUM_STRING_LENGTH = NumericField( - "columnMaximumStringLength", "columnMaximumStringLength" +Column.SQL_MAXIMUM_STRING_LENGTH = NumericField( + "sqlMaximumStringLength", "sqlMaximumStringLength" ) Column.COLUMN_MAXS = KeywordField("columnMaxs", "columnMaxs") -Column.COLUMN_MINIMUM_STRING_LENGTH = NumericField( - "columnMinimumStringLength", "columnMinimumStringLength" +Column.SQL_MINIMUM_STRING_LENGTH = NumericField( + "sqlMinimumStringLength", "sqlMinimumStringLength" ) Column.COLUMN_MINS = KeywordField("columnMins", "columnMins") -Column.COLUMN_MISSING_VALUES_COUNT = NumericField( - "columnMissingValuesCount", "columnMissingValuesCount" +Column.SQL_MISSING_VALUES_COUNT = NumericField( + "sqlMissingValuesCount", "sqlMissingValuesCount" ) -Column.COLUMN_MISSING_VALUES_COUNT_LONG = NumericField( - "columnMissingValuesCountLong", "columnMissingValuesCountLong" +Column.SQL_MISSING_VALUES_COUNT_LONG = NumericField( + "sqlMissingValuesCountLong", "sqlMissingValuesCountLong" ) -Column.COLUMN_MISSING_VALUES_PERCENTAGE = NumericField( - "columnMissingValuesPercentage", "columnMissingValuesPercentage" +Column.SQL_MISSING_VALUES_PERCENTAGE = NumericField( + "sqlMissingValuesPercentage", "sqlMissingValuesPercentage" ) -Column.COLUMN_UNIQUENESS_PERCENTAGE = NumericField( - "columnUniquenessPercentage", "columnUniquenessPercentage" +Column.SQL_UNIQUENESS_PERCENTAGE = NumericField( + "sqlUniquenessPercentage", "sqlUniquenessPercentage" ) -Column.COLUMN_VARIANCE = NumericField("columnVariance", "columnVariance") +Column.SQL_VARIANCE = NumericField("sqlVariance", "sqlVariance") Column.COLUMN_TOP_VALUES = KeywordField("columnTopValues", "columnTopValues") -Column.COLUMN_MAX_VALUE = 
NumericField("columnMaxValue", "columnMaxValue") -Column.COLUMN_MIN_VALUE = NumericField("columnMinValue", "columnMinValue") -Column.COLUMN_MEAN_VALUE = NumericField("columnMeanValue", "columnMeanValue") -Column.COLUMN_SUM_VALUE = NumericField("columnSumValue", "columnSumValue") -Column.COLUMN_MEDIAN_VALUE = NumericField("columnMedianValue", "columnMedianValue") -Column.COLUMN_STANDARD_DEVIATION_VALUE = NumericField( - "columnStandardDeviationValue", "columnStandardDeviationValue" -) -Column.COLUMN_AVERAGE_VALUE = NumericField("columnAverageValue", "columnAverageValue") -Column.COLUMN_VARIANCE_VALUE = NumericField( - "columnVarianceValue", "columnVarianceValue" +Column.SQL_MAX_VALUE = NumericField("sqlMaxValue", "sqlMaxValue") +Column.SQL_MIN_VALUE = NumericField("sqlMinValue", "sqlMinValue") +Column.SQL_MEAN_VALUE = NumericField("sqlMeanValue", "sqlMeanValue") +Column.SQL_SUM_VALUE = NumericField("sqlSumValue", "sqlSumValue") +Column.SQL_MEDIAN_VALUE = NumericField("sqlMedianValue", "sqlMedianValue") +Column.SQL_STANDARD_DEVIATION_VALUE = NumericField( + "sqlStandardDeviationValue", "sqlStandardDeviationValue" ) -Column.COLUMN_AVERAGE_LENGTH_VALUE = NumericField( - "columnAverageLengthValue", "columnAverageLengthValue" +Column.SQL_AVERAGE_VALUE = NumericField("sqlAverageValue", "sqlAverageValue") +Column.SQL_VARIANCE_VALUE = NumericField("sqlVarianceValue", "sqlVarianceValue") +Column.SQL_AVERAGE_LENGTH_VALUE = NumericField( + "sqlAverageLengthValue", "sqlAverageLengthValue" ) -Column.COLUMN_DISTRIBUTION_HISTOGRAM = KeywordField( - "columnDistributionHistogram", "columnDistributionHistogram" +Column.SQL_DISTRIBUTION_HISTOGRAM = KeywordField( + "sqlDistributionHistogram", "sqlDistributionHistogram" ) -Column.COLUMN_DEPTH_LEVEL = NumericField("columnDepthLevel", "columnDepthLevel") +Column.SQL_DEPTH_LEVEL = NumericField("sqlDepthLevel", "sqlDepthLevel") Column.NOSQL_COLLECTION_NAME = KeywordField( "nosqlCollectionName", "nosqlCollectionName" ) 
Column.NOSQL_COLLECTION_QUALIFIED_NAME = KeywordField( "nosqlCollectionQualifiedName", "nosqlCollectionQualifiedName" ) -Column.COLUMN_IS_MEASURE = BooleanField("columnIsMeasure", "columnIsMeasure") -Column.COLUMN_MEASURE_TYPE = KeywordField("columnMeasureType", "columnMeasureType") +Column.SQL_IS_MEASURE = BooleanField("sqlIsMeasure", "sqlIsMeasure") +Column.SQL_MEASURE_TYPE = KeywordField("sqlMeasureType", "sqlMeasureType") Column.QUERY_COUNT = NumericField("queryCount", "queryCount") Column.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") Column.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") diff --git a/pyatlan_v9/model/assets/column_process.py b/pyatlan_v9/model/assets/column_process.py index 4ca2d0692..45668c8c2 100644 --- a/pyatlan_v9/model/assets/column_process.py +++ b/pyatlan_v9/model/assets/column_process.py @@ -111,6 +111,8 @@ class ColumnProcess(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ColumnProcess" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -242,72 +244,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ColumnProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.process is UNSET: - errors.append("process is required for creation") - if errors: - raise ValueError(f"ColumnProcess validation failed: {errors}") - - def minimize(self) -> "ColumnProcess": - """ - Return a minimal copy of this ColumnProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ColumnProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ColumnProcess instance with only the minimum required fields. - """ - self.validate() - return ColumnProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedColumnProcess": - """ - Create a :class:`RelatedColumnProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedColumnProcess reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedColumnProcess(guid=self.guid) - return RelatedColumnProcess(qualified_name=self.qualified_name) - @staticmethod def _extract_guid(relationship: Any) -> Union[str, None]: """Extract guid from a relationship-like object.""" @@ -742,9 +678,6 @@ def _column_process_to_nested(column_process: ColumnProcess) -> ColumnProcessNes is_incomplete=column_process.is_incomplete, provenance_type=column_process.provenance_type, home_id=column_process.home_id, - depth=column_process.depth, - immediate_upstream=column_process.immediate_upstream, - immediate_downstream=column_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -778,6 +711,7 @@ def _column_process_from_nested(nested: ColumnProcessNested) -> ColumnProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -786,9 +720,6 @@ def _column_process_from_nested(nested: ColumnProcessNested) -> ColumnProcess: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_column_process_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/connection.py b/pyatlan_v9/model/assets/connection.py index f19921a48..04bf016e1 100644 --- a/pyatlan_v9/model/assets/connection.py +++ b/pyatlan_v9/model/assets/connection.py @@ -40,7 +40,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .connection_related import RelatedConnection from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm @@ -119,6 +118,8 @@ class Connection(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Connection" + category: Union[str, None, UnsetType] = UNSET """Type of connection, for example WAREHOUSE, RDBMS, etc.""" @@ -322,67 +323,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^default/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Connection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if errors: - raise ValueError(f"Connection validation failed: {errors}") - - def minimize(self) -> "Connection": - """ - Return a minimal copy of this Connection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Connection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Connection instance with only the minimum required fields. - """ - self.validate() - return Connection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedConnection": - """ - Create a :class:`RelatedConnection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedConnection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedConnection(guid=self.guid) - return RelatedConnection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -969,9 +909,6 @@ def _connection_to_nested(connection: Connection) -> ConnectionNested: is_incomplete=connection.is_incomplete, provenance_type=connection.provenance_type, home_id=connection.home_id, - depth=connection.depth, - immediate_upstream=connection.immediate_upstream, - immediate_downstream=connection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1003,6 +940,7 @@ def _connection_from_nested(nested: ConnectionNested) -> Connection: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1011,9 +949,6 @@ def _connection_from_nested(nested: ConnectionNested) -> Connection: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_connection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db.py b/pyatlan_v9/model/assets/cosmos_mongo_db.py index 
cf328c7d8..b23a95cd0 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db.py @@ -38,7 +38,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import RelatedCosmosMongoDB from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +92,8 @@ class CosmosMongoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDB" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -193,66 +194,6 @@ class CosmosMongoDB(Asset): def __post_init__(self) -> None: self.type_name = "CosmosMongoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CosmosMongoDB validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDB": - """ - Return a minimal copy of this CosmosMongoDB with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDB instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDB": - """ - Create a :class:`RelatedCosmosMongoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDB reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCosmosMongoDB(guid=self.guid) - return RelatedCosmosMongoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -512,9 +453,6 @@ def _cosmos_mongo_db_to_nested(cosmos_mongo_db: CosmosMongoDB) -> CosmosMongoDBN is_incomplete=cosmos_mongo_db.is_incomplete, provenance_type=cosmos_mongo_db.provenance_type, home_id=cosmos_mongo_db.home_id, - depth=cosmos_mongo_db.depth, - immediate_upstream=cosmos_mongo_db.immediate_upstream, - immediate_downstream=cosmos_mongo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -548,6 +486,7 @@ def _cosmos_mongo_db_from_nested(nested: CosmosMongoDBNested) -> CosmosMongoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -556,9 +495,6 @@ def _cosmos_mongo_db_from_nested(nested: CosmosMongoDBNested) -> CosmosMongoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_account.py b/pyatlan_v9/model/assets/cosmos_mongo_db_account.py index 86a2c5c51..afd621da5 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_account.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db_account.py @@ -38,10 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import ( - 
RelatedCosmosMongoDBAccount, - RelatedCosmosMongoDBDatabase, -) +from .cosmos_mongo_db_related import RelatedCosmosMongoDBDatabase from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -113,6 +110,8 @@ class CosmosMongoDBAccount(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBAccount" + cosmos_mongo_db_account_instance_id: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="cosmosMongoDBAccountInstanceId" ) @@ -304,66 +303,6 @@ class CosmosMongoDBAccount(Asset): def __post_init__(self) -> None: self.type_name = "CosmosMongoDBAccount" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBAccount instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CosmosMongoDBAccount validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBAccount": - """ - Return a minimal copy of this CosmosMongoDBAccount with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBAccount with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBAccount instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBAccount(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDBAccount": - """ - Create a :class:`RelatedCosmosMongoDBAccount` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBAccount reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBAccount(guid=self.guid) - return RelatedCosmosMongoDBAccount(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -805,9 +744,6 @@ def _cosmos_mongo_db_account_to_nested( is_incomplete=cosmos_mongo_db_account.is_incomplete, provenance_type=cosmos_mongo_db_account.provenance_type, home_id=cosmos_mongo_db_account.home_id, - depth=cosmos_mongo_db_account.depth, - immediate_upstream=cosmos_mongo_db_account.immediate_upstream, - immediate_downstream=cosmos_mongo_db_account.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -843,6 +779,7 @@ def _cosmos_mongo_db_account_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -851,9 +788,6 @@ def _cosmos_mongo_db_account_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_account_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py b/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py index 1aaaa0c38..bfc40f9f3 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py @@ -39,10 +39,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import ( - RelatedCosmosMongoDBCollection, - 
RelatedCosmosMongoDBDatabase, -) +from .cosmos_mongo_db_related import RelatedCosmosMongoDBDatabase from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .dbt_related import ( @@ -186,6 +183,8 @@ class CosmosMongoDBCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBCollection" + cosmos_mongo_db_database_qualified_name: Union[str, None, UnsetType] = ( msgspec.field(default=UNSET, name="cosmosMongoDBDatabaseQualifiedName") ) @@ -554,82 +553,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cosmos_mongo_db_database is UNSET: - errors.append("cosmos_mongo_db_database is required for creation") - if self.cosmos_mongo_db_database_qualified_name is UNSET: - errors.append( - "cosmos_mongo_db_database_qualified_name is required for creation" - ) - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"CosmosMongoDBCollection validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBCollection": - """ - Return a minimal copy of this CosmosMongoDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBCollection instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBCollection( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedCosmosMongoDBCollection": - """ - Create a :class:`RelatedCosmosMongoDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBCollection(guid=self.guid) - return RelatedCosmosMongoDBCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1319,9 +1242,6 @@ def _cosmos_mongo_db_collection_to_nested( is_incomplete=cosmos_mongo_db_collection.is_incomplete, provenance_type=cosmos_mongo_db_collection.provenance_type, home_id=cosmos_mongo_db_collection.home_id, - depth=cosmos_mongo_db_collection.depth, - immediate_upstream=cosmos_mongo_db_collection.immediate_upstream, - immediate_downstream=cosmos_mongo_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1357,6 +1277,7 @@ def _cosmos_mongo_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1365,9 +1286,6 @@ def _cosmos_mongo_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_database.py b/pyatlan_v9/model/assets/cosmos_mongo_db_database.py index 0598bad65..1e893e205 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_database.py +++ 
b/pyatlan_v9/model/assets/cosmos_mongo_db_database.py @@ -42,7 +42,6 @@ from .cosmos_mongo_db_related import ( RelatedCosmosMongoDBAccount, RelatedCosmosMongoDBCollection, - RelatedCosmosMongoDBDatabase, ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric @@ -141,6 +140,8 @@ class CosmosMongoDBDatabase(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBDatabase" + cosmos_mongo_db_account_qualified_name: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="cosmosMongoDBAccountQualifiedName" ) @@ -362,76 +363,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cosmos_mongo_db_account is UNSET: - errors.append("cosmos_mongo_db_account is required for creation") - if self.cosmos_mongo_db_account_qualified_name is UNSET: - errors.append( - "cosmos_mongo_db_account_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"CosmosMongoDBDatabase validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBDatabase": - """ - Return a minimal copy of this CosmosMongoDBDatabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBDatabase instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDBDatabase": - """ - Create a :class:`RelatedCosmosMongoDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBDatabase(guid=self.guid) - return RelatedCosmosMongoDBDatabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -874,9 +805,6 @@ def _cosmos_mongo_db_database_to_nested( is_incomplete=cosmos_mongo_db_database.is_incomplete, provenance_type=cosmos_mongo_db_database.provenance_type, home_id=cosmos_mongo_db_database.home_id, - depth=cosmos_mongo_db_database.depth, - immediate_upstream=cosmos_mongo_db_database.immediate_upstream, - immediate_downstream=cosmos_mongo_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -912,6 +840,7 @@ def _cosmos_mongo_db_database_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -920,9 +849,6 @@ def _cosmos_mongo_db_database_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cube.py b/pyatlan_v9/model/assets/cube.py index db3631b47..ffaf92365 100644 --- a/pyatlan_v9/model/assets/cube.py +++ b/pyatlan_v9/model/assets/cube.py @@ -37,7 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCube, RelatedCubeDimension +from .cube_related import RelatedCubeDimension from .data_mesh_related import RelatedDataProduct from 
.data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -99,6 +99,8 @@ class Cube(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cube" + cube_dimension_count: Union[int, None, UnsetType] = UNSET """Number of dimensions in the cube.""" @@ -218,66 +220,6 @@ class Cube(Asset): def __post_init__(self) -> None: self.type_name = "Cube" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cube instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cube validation failed: {errors}") - - def minimize(self) -> "Cube": - """ - Return a minimal copy of this Cube with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cube with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Cube instance with only the minimum required fields. - """ - self.validate() - return Cube(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCube": - """ - Create a :class:`RelatedCube` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCube reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCube(guid=self.guid) - return RelatedCube(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -559,9 +501,6 @@ def _cube_to_nested(cube: Cube) -> CubeNested: is_incomplete=cube.is_incomplete, provenance_type=cube.provenance_type, home_id=cube.home_id, - depth=cube.depth, - immediate_upstream=cube.immediate_upstream, - immediate_downstream=cube.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -591,6 +530,7 @@ def _cube_from_nested(nested: CubeNested) -> Cube: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -599,9 +539,6 @@ def _cube_from_nested(nested: CubeNested) -> Cube: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cube_dimension.py 
b/pyatlan_v9/model/assets/cube_dimension.py index 0afa609fa..53b6dac81 100644 --- a/pyatlan_v9/model/assets/cube_dimension.py +++ b/pyatlan_v9/model/assets/cube_dimension.py @@ -102,6 +102,8 @@ class CubeDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeDimension" + cube_hierarchy_count: Union[int, None, UnsetType] = UNSET """Number of hierarchies in the cube dimension.""" @@ -233,76 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube is UNSET: - errors.append("cube is required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeDimension validation failed: {errors}") - - def minimize(self) -> "CubeDimension": - """ - Return a minimal copy of this CubeDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeDimension instance with only the minimum required fields. - """ - self.validate() - return CubeDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeDimension": - """ - Create a :class:`RelatedCubeDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCubeDimension(guid=self.guid) - return RelatedCubeDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -600,9 +532,6 @@ def _cube_dimension_to_nested(cube_dimension: CubeDimension) -> CubeDimensionNes is_incomplete=cube_dimension.is_incomplete, provenance_type=cube_dimension.provenance_type, home_id=cube_dimension.home_id, - depth=cube_dimension.depth, - immediate_upstream=cube_dimension.immediate_upstream, - immediate_downstream=cube_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -636,6 +565,7 @@ def _cube_dimension_from_nested(nested: CubeDimensionNested) -> CubeDimension: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -644,9 +574,6 @@ def _cube_dimension_from_nested(nested: CubeDimensionNested) -> CubeDimension: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cube_field.py b/pyatlan_v9/model/assets/cube_field.py index c6a2a79ab..34a8b7527 100644 --- a/pyatlan_v9/model/assets/cube_field.py +++ b/pyatlan_v9/model/assets/cube_field.py @@ -108,6 +108,8 @@ class CubeField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeField" + 
cube_parent_field_name: Union[str, None, UnsetType] = UNSET """Name of the parent field in which this field is nested.""" @@ -259,84 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube_hierarchy is UNSET: - errors.append("cube_hierarchy is required for creation") - if self.cube_hierarchy_name is UNSET: - errors.append("cube_hierarchy_name is required for creation") - if self.cube_hierarchy_qualified_name is UNSET: - errors.append("cube_hierarchy_qualified_name is required for creation") - if self.cube_dimension_name is UNSET: - errors.append("cube_dimension_name is required for creation") - if self.cube_dimension_qualified_name is UNSET: - errors.append("cube_dimension_qualified_name is 
required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeField validation failed: {errors}") - - def minimize(self) -> "CubeField": - """ - Return a minimal copy of this CubeField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeField instance with only the minimum required fields. - """ - self.validate() - return CubeField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeField": - """ - Create a :class:`RelatedCubeField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCubeField(guid=self.guid) - return RelatedCubeField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -659,9 +583,6 @@ def _cube_field_to_nested(cube_field: CubeField) -> CubeFieldNested: is_incomplete=cube_field.is_incomplete, provenance_type=cube_field.provenance_type, home_id=cube_field.home_id, - depth=cube_field.depth, - immediate_upstream=cube_field.immediate_upstream, - immediate_downstream=cube_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -693,6 +614,7 @@ def _cube_field_from_nested(nested: CubeFieldNested) -> CubeField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -701,9 +623,6 @@ def _cube_field_from_nested(nested: CubeFieldNested) -> CubeField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/cube_hierarchy.py b/pyatlan_v9/model/assets/cube_hierarchy.py index 53e34ee1b..2ac01a128 100644 --- a/pyatlan_v9/model/assets/cube_hierarchy.py +++ b/pyatlan_v9/model/assets/cube_hierarchy.py @@ -38,7 +38,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCubeDimension, RelatedCubeField, RelatedCubeHierarchy +from .cube_related import RelatedCubeDimension, RelatedCubeField from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -102,6 +102,8 @@ class CubeHierarchy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeHierarchy" + cube_field_count: Union[int, None, UnsetType] = UNSET """Number of total fields in the cube hierarchy.""" @@ -235,80 +237,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeHierarchy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube_dimension is UNSET: - errors.append("cube_dimension is required for creation") - if self.cube_dimension_name is UNSET: - errors.append("cube_dimension_name is required for creation") - if self.cube_dimension_qualified_name is UNSET: - errors.append("cube_dimension_qualified_name is required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeHierarchy validation failed: {errors}") - - def minimize(self) -> "CubeHierarchy": - """ - Return a minimal copy of this CubeHierarchy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeHierarchy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeHierarchy instance with only the minimum required fields. - """ - self.validate() - return CubeHierarchy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeHierarchy": - """ - Create a :class:`RelatedCubeHierarchy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeHierarchy reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCubeHierarchy(guid=self.guid) - return RelatedCubeHierarchy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -606,9 +534,6 @@ def _cube_hierarchy_to_nested(cube_hierarchy: CubeHierarchy) -> CubeHierarchyNes is_incomplete=cube_hierarchy.is_incomplete, provenance_type=cube_hierarchy.provenance_type, home_id=cube_hierarchy.home_id, - depth=cube_hierarchy.depth, - immediate_upstream=cube_hierarchy.immediate_upstream, - immediate_downstream=cube_hierarchy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -642,6 +567,7 @@ def _cube_hierarchy_from_nested(nested: CubeHierarchyNested) -> CubeHierarchy: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -650,9 +576,6 @@ def _cube_hierarchy_from_nested(nested: CubeHierarchyNested) -> CubeHierarchy: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_hierarchy_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/custom.py b/pyatlan_v9/model/assets/custom.py index 8dbb86abb..e93deb894 100644 --- a/pyatlan_v9/model/assets/custom.py +++ b/pyatlan_v9/model/assets/custom.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) 
-from .custom_related import RelatedCustom from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class Custom(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Custom" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Custom(Asset): def __post_init__(self) -> None: self.type_name = "Custom" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Custom instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Custom validation failed: {errors}") - - def minimize(self) -> "Custom": - """ - Return a minimal copy of this Custom with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Custom with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Custom instance with only the minimum required fields. - """ - self.validate() - return Custom(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCustom": - """ - Create a :class:`RelatedCustom` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCustom reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCustom(guid=self.guid) - return RelatedCustom(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _custom_to_nested(custom: Custom) -> CustomNested: is_incomplete=custom.is_incomplete, provenance_type=custom.provenance_type, home_id=custom.home_id, - depth=custom.depth, - immediate_upstream=custom.immediate_upstream, - immediate_downstream=custom.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _custom_from_nested(nested: CustomNested) -> Custom: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _custom_from_nested(nested: CustomNested) -> Custom: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_custom_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/custom_entity.py b/pyatlan_v9/model/assets/custom_entity.py index a86e231e9..cc23914cf 100644 --- a/pyatlan_v9/model/assets/custom_entity.py +++ b/pyatlan_v9/model/assets/custom_entity.py @@ -98,6 +98,8 @@ class CustomEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CustomEntity" + custom_children_subtype: Union[str, None, UnsetType] = UNSET """Label of the children column for this asset type.""" @@ -218,70 +220,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CustomEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"CustomEntity validation failed: {errors}") - - def minimize(self) -> "CustomEntity": - """ - Return a minimal copy of this CustomEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CustomEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CustomEntity instance with only the minimum required fields. - """ - self.validate() - return CustomEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCustomEntity": - """ - Create a :class:`RelatedCustomEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCustomEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCustomEntity(guid=self.guid) - return RelatedCustomEntity(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -587,9 +525,6 @@ def _custom_entity_to_nested(custom_entity: CustomEntity) -> CustomEntityNested: is_incomplete=custom_entity.is_incomplete, provenance_type=custom_entity.provenance_type, home_id=custom_entity.home_id, - depth=custom_entity.depth, - immediate_upstream=custom_entity.immediate_upstream, - immediate_downstream=custom_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -623,6 +558,7 @@ def _custom_entity_from_nested(nested: CustomEntityNested) -> CustomEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -631,9 +567,6 @@ def _custom_entity_from_nested(nested: CustomEntityNested) -> CustomEntity: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_custom_entity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_domain.py b/pyatlan_v9/model/assets/data_domain.py index 67a072d38..fdb1c05fd 100644 --- a/pyatlan_v9/model/assets/data_domain.py +++ b/pyatlan_v9/model/assets/data_domain.py @@ -98,6 +98,8 @@ class DataDomain(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataDomain" + parent_domain_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent domain in which this asset exists.""" @@ -211,66 +213,6 @@ class DataDomain(Asset): def 
__post_init__(self) -> None: self.type_name = "DataDomain" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataDomain instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataDomain validation failed: {errors}") - - def minimize(self) -> "DataDomain": - """ - Return a minimal copy of this DataDomain with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataDomain with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataDomain instance with only the minimum required fields. - """ - self.validate() - return DataDomain(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataDomain": - """ - Create a :class:`RelatedDataDomain` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataDomain reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataDomain(guid=self.guid) - return RelatedDataDomain(qualified_name=self.qualified_name) - @classmethod def _get_super_domain_qualified_name( cls, domain_qualified_name: str @@ -604,9 +546,6 @@ def _data_domain_to_nested(data_domain: DataDomain) -> DataDomainNested: is_incomplete=data_domain.is_incomplete, provenance_type=data_domain.provenance_type, home_id=data_domain.home_id, - depth=data_domain.depth, - immediate_upstream=data_domain.immediate_upstream, - immediate_downstream=data_domain.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -638,6 +577,7 @@ def _data_domain_from_nested(nested: DataDomainNested) -> DataDomain: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -646,9 +586,6 @@ def _data_domain_from_nested(nested: DataDomainNested) -> DataDomain: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_domain_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_mesh.py b/pyatlan_v9/model/assets/data_mesh.py index 2445ceb41..a7f2d7865 100644 --- a/pyatlan_v9/model/assets/data_mesh.py +++ b/pyatlan_v9/model/assets/data_mesh.py @@ -37,7 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .data_mesh_related import RelatedDataMesh, RelatedDataProduct +from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -92,6 +92,8 @@ class DataMesh(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataMesh" + parent_domain_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent domain in which this asset exists.""" @@ -193,66 +195,6 @@ class DataMesh(Asset): def __post_init__(self) -> None: self.type_name = "DataMesh" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataMesh instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataMesh validation failed: {errors}") - - def minimize(self) -> "DataMesh": - """ - Return a minimal copy of this DataMesh with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataMesh with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataMesh instance with only the minimum required fields. - """ - self.validate() - return DataMesh(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataMesh": - """ - Create a :class:`RelatedDataMesh` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataMesh reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataMesh(guid=self.guid) - return RelatedDataMesh(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -509,9 +451,6 @@ def _data_mesh_to_nested(data_mesh: DataMesh) -> DataMeshNested: is_incomplete=data_mesh.is_incomplete, provenance_type=data_mesh.provenance_type, home_id=data_mesh.home_id, - depth=data_mesh.depth, - immediate_upstream=data_mesh.immediate_upstream, - immediate_downstream=data_mesh.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -543,6 +482,7 @@ def _data_mesh_from_nested(nested: DataMeshNested) -> DataMesh: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -551,9 +491,6 @@ def _data_mesh_from_nested(nested: DataMeshNested) -> DataMesh: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_mesh_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_mesh_related.py b/pyatlan_v9/model/assets/data_mesh_related.py index 95e1adc19..23ebce2d1 100644 --- a/pyatlan_v9/model/assets/data_mesh_related.py +++ b/pyatlan_v9/model/assets/data_mesh_related.py @@ -124,7 +124,7 @@ class RelatedDataProduct(RelatedDataMesh): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET diff --git a/pyatlan_v9/model/assets/data_product.py b/pyatlan_v9/model/assets/data_product.py index d9889007a..72d9711c7 100644 --- a/pyatlan_v9/model/assets/data_product.py +++ b/pyatlan_v9/model/assets/data_product.py @@ -81,7 +81,7 @@ class DataProduct(Asset): DATA_PRODUCT_ASSETS_DSL: ClassVar[Any] = None DATA_PRODUCT_ASSETS_PLAYBOOK_FILTER: ClassVar[Any] = None DATA_PRODUCT_SCORE_VALUE: ClassVar[Any] = None - DATA_PRODUCT_SCORE_UPDATED_AT: ClassVar[Any] = None + DATA_MESH_SCORE_UPDATED_AT: ClassVar[Any] = None DAAP_VISIBILITY_USERS: ClassVar[Any] = None DAAP_VISIBILITY_GROUPS: ClassVar[Any] = None DAAP_OUTPUT_PORT_GUIDS: ClassVar[Any] = None @@ -122,6 +122,8 @@ class DataProduct(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None STARBURST_DATASETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataProduct" + data_product_status: Union[str, None, UnsetType] = UNSET """Status of this data product.""" @@ -157,7 +159,7 @@ class DataProduct(Asset): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - 
data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET @@ -294,70 +296,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/product/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataProduct instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.data_domain is UNSET: - errors.append("data_domain is required for creation") - if errors: - raise ValueError(f"DataProduct validation failed: {errors}") - - def minimize(self) -> "DataProduct": - """ - Return a minimal copy of this DataProduct with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataProduct with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataProduct instance with only the minimum required fields. - """ - self.validate() - return DataProduct(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataProduct": - """ - Create a :class:`RelatedDataProduct` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataProduct reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataProduct(guid=self.guid) - return RelatedDataProduct(qualified_name=self.qualified_name) - @classmethod def _get_super_domain_qualified_name( cls, domain_qualified_name: str @@ -528,7 +466,7 @@ class DataProductAttributes(AssetAttributes): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET @@ -731,7 +669,7 @@ def _populate_data_product_attrs( attrs.data_product_assets_dsl = obj.data_product_assets_dsl attrs.data_product_assets_playbook_filter = obj.data_product_assets_playbook_filter attrs.data_product_score_value = obj.data_product_score_value - attrs.data_product_score_updated_at = obj.data_product_score_updated_at + attrs.data_mesh_score_updated_at = obj.data_mesh_score_updated_at attrs.daap_visibility_users = obj.daap_visibility_users attrs.daap_visibility_groups = obj.daap_visibility_groups attrs.daap_output_port_guids = obj.daap_output_port_guids @@ -757,7 +695,7 @@ def 
_extract_data_product_attrs(attrs: DataProductAttributes) -> dict: attrs.data_product_assets_playbook_filter ) result["data_product_score_value"] = attrs.data_product_score_value - result["data_product_score_updated_at"] = attrs.data_product_score_updated_at + result["data_mesh_score_updated_at"] = attrs.data_mesh_score_updated_at result["daap_visibility_users"] = attrs.daap_visibility_users result["daap_visibility_groups"] = attrs.daap_visibility_groups result["daap_output_port_guids"] = attrs.daap_output_port_guids @@ -801,9 +739,6 @@ def _data_product_to_nested(data_product: DataProduct) -> DataProductNested: is_incomplete=data_product.is_incomplete, provenance_type=data_product.provenance_type, home_id=data_product.home_id, - depth=data_product.depth, - immediate_upstream=data_product.immediate_upstream, - immediate_downstream=data_product.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -835,6 +770,7 @@ def _data_product_from_nested(nested: DataProductNested) -> DataProduct: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -843,9 +779,6 @@ def _data_product_from_nested(nested: DataProductNested) -> DataProduct: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_product_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -896,8 +829,8 @@ def _data_product_from_nested_bytes(data: bytes, serde: Serde) -> DataProduct: DataProduct.DATA_PRODUCT_SCORE_VALUE = NumericField( "dataProductScoreValue", "dataProductScoreValue" ) -DataProduct.DATA_PRODUCT_SCORE_UPDATED_AT = NumericField( - 
"dataProductScoreUpdatedAt", "dataProductScoreUpdatedAt" +DataProduct.DATA_MESH_SCORE_UPDATED_AT = NumericField( + "dataMeshScoreUpdatedAt", "dataMeshScoreUpdatedAt" ) DataProduct.DAAP_VISIBILITY_USERS = KeywordField( "daapVisibilityUsers", "daapVisibilityUsers" diff --git a/pyatlan_v9/model/assets/data_quality.py b/pyatlan_v9/model/assets/data_quality.py index c8283d091..88dfb9239 100644 --- a/pyatlan_v9/model/assets/data_quality.py +++ b/pyatlan_v9/model/assets/data_quality.py @@ -38,11 +38,7 @@ _populate_asset_attrs, ) from .data_mesh_related import RelatedDataProduct -from .data_quality_related import ( - RelatedDataQuality, - RelatedDataQualityRule, - RelatedMetric, -) +from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +91,8 @@ class DataQuality(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataQuality" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -193,66 +191,6 @@ class DataQuality(Asset): def __post_init__(self) -> None: self.type_name = "DataQuality" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQuality instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataQuality validation failed: {errors}") - - def minimize(self) -> "DataQuality": - """ - Return a minimal copy of this DataQuality with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQuality with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQuality instance with only the minimum required fields. - """ - self.validate() - return DataQuality(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataQuality": - """ - Create a :class:`RelatedDataQuality` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQuality reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataQuality(guid=self.guid) - return RelatedDataQuality(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +444,6 @@ def _data_quality_to_nested(data_quality: DataQuality) -> DataQualityNested: is_incomplete=data_quality.is_incomplete, provenance_type=data_quality.provenance_type, home_id=data_quality.home_id, - depth=data_quality.depth, - immediate_upstream=data_quality.immediate_upstream, - immediate_downstream=data_quality.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -540,6 +475,7 @@ def _data_quality_from_nested(nested: DataQualityNested) -> DataQuality: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -548,9 +484,6 @@ def _data_quality_from_nested(nested: DataQualityNested) -> DataQuality: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_quality_rule.py b/pyatlan_v9/model/assets/data_quality_rule.py index c35757106..21f75b8ba 100644 --- a/pyatlan_v9/model/assets/data_quality_rule.py +++ b/pyatlan_v9/model/assets/data_quality_rule.py @@ -146,6 +146,8 @@ class DataQualityRule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = 
"DataQualityRule" + dq_rule_base_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Base dataset qualified name that attached to this rule.""" @@ -340,72 +342,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQualityRule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dq_rule_template is UNSET: - errors.append("dq_rule_template is required for creation") - if errors: - raise ValueError(f"DataQualityRule validation failed: {errors}") - - def minimize(self) -> "DataQualityRule": - """ - Return a minimal copy of this DataQualityRule with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQualityRule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQualityRule instance with only the minimum required fields. - """ - self.validate() - return DataQualityRule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataQualityRule": - """ - Create a :class:`RelatedDataQualityRule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQualityRule reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataQualityRule(guid=self.guid) - return RelatedDataQualityRule(qualified_name=self.qualified_name) - @classmethod @init_guid def custom_sql_creator( @@ -1501,9 +1437,6 @@ def _data_quality_rule_to_nested( is_incomplete=data_quality_rule.is_incomplete, provenance_type=data_quality_rule.provenance_type, home_id=data_quality_rule.home_id, - depth=data_quality_rule.depth, - immediate_upstream=data_quality_rule.immediate_upstream, - immediate_downstream=data_quality_rule.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1537,6 +1470,7 @@ def _data_quality_rule_from_nested(nested: DataQualityRuleNested) -> DataQuality updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1545,9 +1479,6 @@ def _data_quality_rule_from_nested(nested: DataQualityRuleNested) -> DataQuality is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_rule_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_quality_rule_template.py b/pyatlan_v9/model/assets/data_quality_rule_template.py index fc765dc79..1e207a134 100644 --- a/pyatlan_v9/model/assets/data_quality_rule_template.py +++ b/pyatlan_v9/model/assets/data_quality_rule_template.py @@ -38,11 +38,7 @@ _populate_asset_attrs, ) from .data_mesh_related import RelatedDataProduct -from .data_quality_related import ( - RelatedDataQualityRule, - RelatedDataQualityRuleTemplate, - RelatedMetric, -) +from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +95,8 @@ class DataQualityRuleTemplate(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataQualityRuleTemplate" + dq_rule_template_dimension: Union[str, None, UnsetType] = UNSET """Name of the dimension the rule belongs to.""" @@ -209,68 +207,6 @@ class DataQualityRuleTemplate(Asset): def __post_init__(self) -> None: self.type_name = "DataQualityRuleTemplate" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQualityRuleTemplate instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataQualityRuleTemplate validation failed: {errors}") - - def minimize(self) -> "DataQualityRuleTemplate": - """ - Return a minimal copy of this DataQualityRuleTemplate with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQualityRuleTemplate with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQualityRuleTemplate instance with only the minimum required fields. - """ - self.validate() - return DataQualityRuleTemplate( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDataQualityRuleTemplate": - """ - Create a :class:`RelatedDataQualityRuleTemplate` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQualityRuleTemplate reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataQualityRuleTemplate(guid=self.guid) - return RelatedDataQualityRuleTemplate(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -555,9 +491,6 @@ def _data_quality_rule_template_to_nested( is_incomplete=data_quality_rule_template.is_incomplete, provenance_type=data_quality_rule_template.provenance_type, home_id=data_quality_rule_template.home_id, - depth=data_quality_rule_template.depth, - immediate_upstream=data_quality_rule_template.immediate_upstream, - immediate_downstream=data_quality_rule_template.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -593,6 +526,7 @@ def _data_quality_rule_template_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -601,9 +535,6 @@ def _data_quality_rule_template_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_rule_template_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_set.py b/pyatlan_v9/model/assets/data_set.py index f02d42bde..56d1ca743 100644 --- a/pyatlan_v9/model/assets/data_set.py +++ b/pyatlan_v9/model/assets/data_set.py @@ -27,7 +27,6 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedDataSet from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -74,6 +73,7 @@ class DataSet(Referenceable): ANNOUNCEMENT_TYPE: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_AT: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_BY: ClassVar[Any] = None + ASSET_ANNOUNCEMENT_EXPIRED_AT: ClassVar[Any] = None OWNER_USERS: ClassVar[Any] = None OWNER_GROUPS: ClassVar[Any] = None ADMIN_USERS: ClassVar[Any] = None @@ -263,6 +263,8 @@ class DataSet(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataSet" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -317,6 +319,9 @@ class DataSet(Referenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -990,66 +995,6 @@ class DataSet(Referenceable): def __post_init__(self) -> None: self.type_name = "DataSet" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataSet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataSet validation failed: {errors}") - - def minimize(self) -> "DataSet": - """ - Return a minimal copy of this DataSet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataSet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataSet instance with only the minimum required fields. - """ - self.validate() - return DataSet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataSet": - """ - Create a :class:`RelatedDataSet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataSet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataSet(guid=self.guid) - return RelatedDataSet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1159,6 +1104,9 @@ class DataSetAttributes(ReferenceableAttributes): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -1899,6 +1847,7 @@ def _populate_data_set_attrs(attrs: DataSetAttributes, obj: DataSet) -> None: attrs.announcement_type = obj.announcement_type attrs.announcement_updated_at = obj.announcement_updated_at attrs.announcement_updated_by = obj.announcement_updated_by + attrs.asset_announcement_expired_at = obj.asset_announcement_expired_at attrs.owner_users = obj.owner_users attrs.owner_groups = obj.owner_groups attrs.admin_users = obj.admin_users @@ -2144,6 +2093,7 @@ def _extract_data_set_attrs(attrs: DataSetAttributes) -> dict: result["announcement_type"] = attrs.announcement_type result["announcement_updated_at"] = attrs.announcement_updated_at result["announcement_updated_by"] = attrs.announcement_updated_by + result["asset_announcement_expired_at"] = attrs.asset_announcement_expired_at result["owner_users"] = attrs.owner_users result["owner_groups"] = attrs.owner_groups result["admin_users"] = attrs.admin_users @@ -2432,9 +2382,6 @@ def _data_set_to_nested(data_set: DataSet) -> DataSetNested: is_incomplete=data_set.is_incomplete, provenance_type=data_set.provenance_type, home_id=data_set.home_id, - depth=data_set.depth, - 
immediate_upstream=data_set.immediate_upstream, - immediate_downstream=data_set.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2464,6 +2411,7 @@ def _data_set_from_nested(nested: DataSetNested) -> DataSet: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2472,9 +2420,6 @@ def _data_set_from_nested(nested: DataSetNested) -> DataSet: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_set_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2545,6 +2490,9 @@ def _data_set_from_nested_bytes(data: bytes, serde: Serde) -> DataSet: DataSet.ANNOUNCEMENT_UPDATED_BY = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) +DataSet.ASSET_ANNOUNCEMENT_EXPIRED_AT = NumericField( + "assetAnnouncementExpiredAt", "assetAnnouncementExpiredAt" +) DataSet.OWNER_USERS = KeywordField("ownerUsers", "ownerUsers") DataSet.OWNER_GROUPS = KeywordField("ownerGroups", "ownerGroups") DataSet.ADMIN_USERS = KeywordField("adminUsers", "adminUsers") diff --git a/pyatlan_v9/model/assets/data_studio.py b/pyatlan_v9/model/assets/data_studio.py index 6f8299df4..e9c4f75e1 100644 --- a/pyatlan_v9/model/assets/data_studio.py +++ b/pyatlan_v9/model/assets/data_studio.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .data_studio_related import RelatedDataStudio from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import 
RelatedMCIncident, RelatedMCMonitor @@ -100,6 +99,8 @@ class DataStudio(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataStudio" + google_service: Union[str, None, UnsetType] = UNSET """Service in Google in which the asset exists.""" @@ -222,66 +223,6 @@ class DataStudio(Asset): def __post_init__(self) -> None: self.type_name = "DataStudio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataStudio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataStudio validation failed: {errors}") - - def minimize(self) -> "DataStudio": - """ - Return a minimal copy of this DataStudio with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataStudio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new DataStudio instance with only the minimum required fields. - """ - self.validate() - return DataStudio(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataStudio": - """ - Create a :class:`RelatedDataStudio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataStudio reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataStudio(guid=self.guid) - return RelatedDataStudio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -573,9 +514,6 @@ def _data_studio_to_nested(data_studio: DataStudio) -> DataStudioNested: is_incomplete=data_studio.is_incomplete, provenance_type=data_studio.provenance_type, home_id=data_studio.home_id, - depth=data_studio.depth, - immediate_upstream=data_studio.immediate_upstream, - immediate_downstream=data_studio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -607,6 +545,7 @@ def _data_studio_from_nested(nested: DataStudioNested) -> DataStudio: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -615,9 +554,6 @@ def _data_studio_from_nested(nested: DataStudioNested) -> DataStudio: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_data_studio_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/data_studio_asset.py b/pyatlan_v9/model/assets/data_studio_asset.py index 2f7aaa5b0..d1a198357 100644 --- a/pyatlan_v9/model/assets/data_studio_asset.py +++ b/pyatlan_v9/model/assets/data_studio_asset.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .data_studio_related import RelatedDataStudioAsset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -105,6 +104,8 @@ class DataStudioAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataStudioAsset" + data_studio_asset_type: Union[str, None, UnsetType] = UNSET """Type of the Google Data Studio asset, for example: REPORT or DATA_SOURCE.""" @@ -239,69 +240,6 @@ class DataStudioAsset(Asset): def __post_init__(self) -> None: self.type_name = "DataStudioAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataStudioAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.data_studio_asset_type is UNSET: - errors.append("data_studio_asset_type is required for creation") - if errors: - raise ValueError(f"DataStudioAsset validation failed: {errors}") - - def minimize(self) -> "DataStudioAsset": - """ - Return a minimal copy of this DataStudioAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataStudioAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataStudioAsset instance with only the minimum required fields. - """ - self.validate() - return DataStudioAsset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataStudioAsset": - """ - Create a :class:`RelatedDataStudioAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataStudioAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataStudioAsset(guid=self.guid) - return RelatedDataStudioAsset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -660,9 +598,6 @@ def _data_studio_asset_to_nested( is_incomplete=data_studio_asset.is_incomplete, provenance_type=data_studio_asset.provenance_type, home_id=data_studio_asset.home_id, - depth=data_studio_asset.depth, - immediate_upstream=data_studio_asset.immediate_upstream, - immediate_downstream=data_studio_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -696,6 +631,7 @@ def _data_studio_asset_from_nested(nested: DataStudioAssetNested) -> DataStudioA updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -704,9 +640,6 @@ def _data_studio_asset_from_nested(nested: DataStudioAssetNested) -> DataStudioA is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_studio_asset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/database.py b/pyatlan_v9/model/assets/database.py index 97d9b6a0a..0f11c0010 100644 --- a/pyatlan_v9/model/assets/database.py +++ b/pyatlan_v9/model/assets/database.py @@ -59,7 +59,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedDatabase, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -129,6 +129,8 
@@ class Database(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Database" + schema_count: Union[int, None, UnsetType] = UNSET """Number of schemas in this database.""" @@ -314,66 +316,6 @@ class Database(Asset): def __post_init__(self) -> None: self.type_name = "Database" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Database instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Database validation failed: {errors}") - - def minimize(self) -> "Database": - """ - Return a minimal copy of this Database with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Database with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Database instance with only the minimum required fields. 
- """ - self.validate() - return Database(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabase": - """ - Create a :class:`RelatedDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabase reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabase(guid=self.guid) - return RelatedDatabase(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -817,9 +759,6 @@ def _database_to_nested(database: Database) -> DatabaseNested: is_incomplete=database.is_incomplete, provenance_type=database.provenance_type, home_id=database.home_id, - depth=database.depth, - immediate_upstream=database.immediate_upstream, - immediate_downstream=database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -851,6 +790,7 @@ def _database_from_nested(nested: DatabaseNested) -> Database: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -859,9 +799,6 @@ def _database_from_nested(nested: DatabaseNested) -> Database: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_database_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/databricks.py b/pyatlan_v9/model/assets/databricks.py index abdeaa8ba..f89d53fb6 100644 --- a/pyatlan_v9/model/assets/databricks.py +++ b/pyatlan_v9/model/assets/databricks.py @@ -40,7 +40,6 @@ ) from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricks from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -124,6 +123,8 @@ class Databricks(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Databricks" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -300,66 +301,6 @@ class Databricks(Asset): def __post_init__(self) -> None: self.type_name = "Databricks" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Databricks instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Databricks validation failed: {errors}") - - def minimize(self) -> "Databricks": - """ - Return a minimal copy of this Databricks with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Databricks with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Databricks instance with only the minimum required fields. - """ - self.validate() - return Databricks(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricks": - """ - Create a :class:`RelatedDatabricks` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricks reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricks(guid=self.guid) - return RelatedDatabricks(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -732,9 +673,6 @@ def _databricks_to_nested(databricks: Databricks) -> DatabricksNested: is_incomplete=databricks.is_incomplete, provenance_type=databricks.provenance_type, home_id=databricks.home_id, - depth=databricks.depth, - immediate_upstream=databricks.immediate_upstream, - immediate_downstream=databricks.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -766,6 +704,7 @@ def _databricks_from_nested(nested: DatabricksNested) -> Databricks: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -774,9 +713,6 @@ def _databricks_from_nested(nested: DatabricksNested) -> Databricks: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/databricks_ai_model_context.py b/pyatlan_v9/model/assets/databricks_ai_model_context.py index d3b3f88d8..37a0a29d2 100644 --- a/pyatlan_v9/model/assets/databricks_ai_model_context.py +++ b/pyatlan_v9/model/assets/databricks_ai_model_context.py @@ -42,10 +42,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksAIModelContext, - RelatedDatabricksAIModelVersion, -) +from .databricks_related import RelatedDatabricksAIModelVersion from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -76,7 +73,7 @@ class DatabricksAIModelContext(Asset): Instance of an ai model in databricks. """ - DATABRICKS_AI_MODEL_CONTEXT_METASTORE_ID: ClassVar[Any] = None + DATABRICKS_METASTORE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -145,9 +142,9 @@ class DatabricksAIModelContext(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + type_name: Union[str, UnsetType] = "DatabricksAIModelContext" + + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" query_count: Union[int, None, UnsetType] = UNSET @@ -396,80 +393,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksAIModelContext instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksAIModelContext validation failed: {errors}") - - def minimize(self) -> "DatabricksAIModelContext": - """ - Return a minimal copy of this DatabricksAIModelContext with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksAIModelContext with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksAIModelContext instance with only the minimum required fields. - """ - self.validate() - return DatabricksAIModelContext( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksAIModelContext": - """ - Create a :class:`RelatedDatabricksAIModelContext` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksAIModelContext reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksAIModelContext(guid=self.guid) - return RelatedDatabricksAIModelContext(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -527,9 +450,7 @@ def from_json( class DatabricksAIModelContextAttributes(AssetAttributes): """DatabricksAIModelContext-specific attributes for nested API format.""" - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" query_count: Union[int, None, UnsetType] = UNSET @@ -840,9 +761,7 @@ def _populate_databricks_ai_model_context_attrs( ) -> None: """Populate DatabricksAIModelContext-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_ai_model_context_metastore_id = ( - obj.databricks_ai_model_context_metastore_id - ) + 
attrs.databricks_metastore_id = obj.databricks_metastore_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -882,9 +801,7 @@ def _extract_databricks_ai_model_context_attrs( ) -> dict: """Extract all DatabricksAIModelContext attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_ai_model_context_metastore_id"] = ( - attrs.databricks_ai_model_context_metastore_id - ) + result["databricks_metastore_id"] = attrs.databricks_metastore_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -961,9 +878,6 @@ def _databricks_ai_model_context_to_nested( is_incomplete=databricks_ai_model_context.is_incomplete, provenance_type=databricks_ai_model_context.provenance_type, home_id=databricks_ai_model_context.home_id, - depth=databricks_ai_model_context.depth, - immediate_upstream=databricks_ai_model_context.immediate_upstream, - immediate_downstream=databricks_ai_model_context.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -999,6 +913,7 @@ def _databricks_ai_model_context_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1007,9 +922,6 @@ def _databricks_ai_model_context_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_ai_model_context_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1043,8 +955,8 @@ def 
_databricks_ai_model_context_from_nested_bytes( RelationField, ) -DatabricksAIModelContext.DATABRICKS_AI_MODEL_CONTEXT_METASTORE_ID = KeywordField( - "databricksAIModelContextMetastoreId", "databricksAIModelContextMetastoreId" +DatabricksAIModelContext.DATABRICKS_METASTORE_ID = KeywordField( + "databricksMetastoreId", "databricksMetastoreId" ) DatabricksAIModelContext.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksAIModelContext.QUERY_USER_COUNT = NumericField( diff --git a/pyatlan_v9/model/assets/databricks_ai_model_version.py b/pyatlan_v9/model/assets/databricks_ai_model_version.py index 624bc207c..cc949411d 100644 --- a/pyatlan_v9/model/assets/databricks_ai_model_version.py +++ b/pyatlan_v9/model/assets/databricks_ai_model_version.py @@ -42,10 +42,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksAIModelContext, - RelatedDatabricksAIModelVersion, -) +from .databricks_related import RelatedDatabricksAIModelContext from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -75,18 +72,18 @@ class DatabricksAIModelVersion(Asset): Instance of an ai model version in databricks. 
""" - DATABRICKS_AI_MODEL_VERSION_ID: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_ID: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_NAME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_START_TIME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_END_TIME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_STATUS: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_ALIASES: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_DATASET_COUNT: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_SOURCE: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_ARTIFACT_URI: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_METRICS: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_PARAMS: ClassVar[Any] = None + DATABRICKS_ID: ClassVar[Any] = None + DATABRICKS_RUN_ID: ClassVar[Any] = None + DATABRICKS_RUN_NAME: ClassVar[Any] = None + DATABRICKS_RUN_START_TIME: ClassVar[Any] = None + DATABRICKS_RUN_END_TIME: ClassVar[Any] = None + DATABRICKS_STATUS: ClassVar[Any] = None + DATABRICKS_ALIASES: ClassVar[Any] = None + DATABRICKS_DATASET_COUNT: ClassVar[Any] = None + DATABRICKS_SOURCE: ClassVar[Any] = None + DATABRICKS_ARTIFACT_URI: ClassVar[Any] = None + DATABRICKS_METRICS: ClassVar[Any] = None + DATABRICKS_PARAMS: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -150,64 +147,42 @@ class DatabricksAIModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + type_name: Union[str, UnsetType] = "DatabricksAIModelVersion" + + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: 
Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = UNSET """The run end time of the model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = 
UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" query_count: Union[int, None, UnsetType] = UNSET @@ -437,82 +412,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksAIModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_ai_model_context is UNSET: - errors.append("databricks_ai_model_context is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksAIModelVersion validation failed: {errors}") - - def minimize(self) -> "DatabricksAIModelVersion": - """ - Return a minimal copy of this DatabricksAIModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksAIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksAIModelVersion instance with only the minimum required fields. - """ - self.validate() - return DatabricksAIModelVersion( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksAIModelVersion": - """ - Create a :class:`RelatedDatabricksAIModelVersion` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksAIModelVersion reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksAIModelVersion(guid=self.guid) - return RelatedDatabricksAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -570,64 +469,40 @@ def from_json( class DatabricksAIModelVersionAttributes(AssetAttributes): """DatabricksAIModelVersion-specific attributes for nested API format.""" - databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = UNSET """The run end time of the 
model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" query_count: Union[int, None, UnsetType] = UNSET @@ -917,28 +792,18 @@ def _populate_databricks_ai_model_version_attrs( ) -> None: """Populate DatabricksAIModelVersion-specific attributes on the attrs 
struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_ai_model_version_id = obj.databricks_ai_model_version_id - attrs.databricks_ai_model_version_run_id = obj.databricks_ai_model_version_run_id - attrs.databricks_ai_model_version_run_name = ( - obj.databricks_ai_model_version_run_name - ) - attrs.databricks_ai_model_version_run_start_time = ( - obj.databricks_ai_model_version_run_start_time - ) - attrs.databricks_ai_model_version_run_end_time = ( - obj.databricks_ai_model_version_run_end_time - ) - attrs.databricks_ai_model_version_status = obj.databricks_ai_model_version_status - attrs.databricks_ai_model_version_aliases = obj.databricks_ai_model_version_aliases - attrs.databricks_ai_model_version_dataset_count = ( - obj.databricks_ai_model_version_dataset_count - ) - attrs.databricks_ai_model_version_source = obj.databricks_ai_model_version_source - attrs.databricks_ai_model_version_artifact_uri = ( - obj.databricks_ai_model_version_artifact_uri - ) - attrs.databricks_ai_model_version_metrics = obj.databricks_ai_model_version_metrics - attrs.databricks_ai_model_version_params = obj.databricks_ai_model_version_params + attrs.databricks_id = obj.databricks_id + attrs.databricks_run_id = obj.databricks_run_id + attrs.databricks_run_name = obj.databricks_run_name + attrs.databricks_run_start_time = obj.databricks_run_start_time + attrs.databricks_run_end_time = obj.databricks_run_end_time + attrs.databricks_status = obj.databricks_status + attrs.databricks_aliases = obj.databricks_aliases + attrs.databricks_dataset_count = obj.databricks_dataset_count + attrs.databricks_source = obj.databricks_source + attrs.databricks_artifact_uri = obj.databricks_artifact_uri + attrs.databricks_metrics = obj.databricks_metrics + attrs.databricks_params = obj.databricks_params attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -975,40 +840,18 @@ def _extract_databricks_ai_model_version_attrs( ) -> 
dict: """Extract all DatabricksAIModelVersion attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_ai_model_version_id"] = attrs.databricks_ai_model_version_id - result["databricks_ai_model_version_run_id"] = ( - attrs.databricks_ai_model_version_run_id - ) - result["databricks_ai_model_version_run_name"] = ( - attrs.databricks_ai_model_version_run_name - ) - result["databricks_ai_model_version_run_start_time"] = ( - attrs.databricks_ai_model_version_run_start_time - ) - result["databricks_ai_model_version_run_end_time"] = ( - attrs.databricks_ai_model_version_run_end_time - ) - result["databricks_ai_model_version_status"] = ( - attrs.databricks_ai_model_version_status - ) - result["databricks_ai_model_version_aliases"] = ( - attrs.databricks_ai_model_version_aliases - ) - result["databricks_ai_model_version_dataset_count"] = ( - attrs.databricks_ai_model_version_dataset_count - ) - result["databricks_ai_model_version_source"] = ( - attrs.databricks_ai_model_version_source - ) - result["databricks_ai_model_version_artifact_uri"] = ( - attrs.databricks_ai_model_version_artifact_uri - ) - result["databricks_ai_model_version_metrics"] = ( - attrs.databricks_ai_model_version_metrics - ) - result["databricks_ai_model_version_params"] = ( - attrs.databricks_ai_model_version_params - ) + result["databricks_id"] = attrs.databricks_id + result["databricks_run_id"] = attrs.databricks_run_id + result["databricks_run_name"] = attrs.databricks_run_name + result["databricks_run_start_time"] = attrs.databricks_run_start_time + result["databricks_run_end_time"] = attrs.databricks_run_end_time + result["databricks_status"] = attrs.databricks_status + result["databricks_aliases"] = attrs.databricks_aliases + result["databricks_dataset_count"] = attrs.databricks_dataset_count + result["databricks_source"] = attrs.databricks_source + result["databricks_artifact_uri"] = attrs.databricks_artifact_uri + 
result["databricks_metrics"] = attrs.databricks_metrics + result["databricks_params"] = attrs.databricks_params result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1082,9 +925,6 @@ def _databricks_ai_model_version_to_nested( is_incomplete=databricks_ai_model_version.is_incomplete, provenance_type=databricks_ai_model_version.provenance_type, home_id=databricks_ai_model_version.home_id, - depth=databricks_ai_model_version.depth, - immediate_upstream=databricks_ai_model_version.immediate_upstream, - immediate_downstream=databricks_ai_model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1120,6 +960,7 @@ def _databricks_ai_model_version_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1128,9 +969,6 @@ def _databricks_ai_model_version_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1164,41 +1002,39 @@ def _databricks_ai_model_version_from_nested_bytes( RelationField, ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ID = NumericField( - "databricksAIModelVersionId", "databricksAIModelVersionId" -) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_ID = KeywordField( - "databricksAIModelVersionRunId", "databricksAIModelVersionRunId" +DatabricksAIModelVersion.DATABRICKS_ID = NumericField("databricksId", "databricksId") +DatabricksAIModelVersion.DATABRICKS_RUN_ID = 
KeywordField( + "databricksRunId", "databricksRunId" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_NAME = KeywordField( - "databricksAIModelVersionRunName", "databricksAIModelVersionRunName" +DatabricksAIModelVersion.DATABRICKS_RUN_NAME = KeywordField( + "databricksRunName", "databricksRunName" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_START_TIME = NumericField( - "databricksAIModelVersionRunStartTime", "databricksAIModelVersionRunStartTime" +DatabricksAIModelVersion.DATABRICKS_RUN_START_TIME = NumericField( + "databricksRunStartTime", "databricksRunStartTime" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_END_TIME = NumericField( - "databricksAIModelVersionRunEndTime", "databricksAIModelVersionRunEndTime" +DatabricksAIModelVersion.DATABRICKS_RUN_END_TIME = NumericField( + "databricksRunEndTime", "databricksRunEndTime" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_STATUS = KeywordField( - "databricksAIModelVersionStatus", "databricksAIModelVersionStatus" +DatabricksAIModelVersion.DATABRICKS_STATUS = KeywordField( + "databricksStatus", "databricksStatus" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ALIASES = KeywordField( - "databricksAIModelVersionAliases", "databricksAIModelVersionAliases" +DatabricksAIModelVersion.DATABRICKS_ALIASES = KeywordField( + "databricksAliases", "databricksAliases" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_DATASET_COUNT = NumericField( - "databricksAIModelVersionDatasetCount", "databricksAIModelVersionDatasetCount" +DatabricksAIModelVersion.DATABRICKS_DATASET_COUNT = NumericField( + "databricksDatasetCount", "databricksDatasetCount" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_SOURCE = KeywordField( - "databricksAIModelVersionSource", "databricksAIModelVersionSource" +DatabricksAIModelVersion.DATABRICKS_SOURCE = KeywordField( + "databricksSource", "databricksSource" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ARTIFACT_URI = 
KeywordField( - "databricksAIModelVersionArtifactUri", "databricksAIModelVersionArtifactUri" +DatabricksAIModelVersion.DATABRICKS_ARTIFACT_URI = KeywordField( + "databricksArtifactUri", "databricksArtifactUri" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_METRICS = KeywordField( - "databricksAIModelVersionMetrics", "databricksAIModelVersionMetrics" +DatabricksAIModelVersion.DATABRICKS_METRICS = KeywordField( + "databricksMetrics", "databricksMetrics" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_PARAMS = KeywordField( - "databricksAIModelVersionParams", "databricksAIModelVersionParams" +DatabricksAIModelVersion.DATABRICKS_PARAMS = KeywordField( + "databricksParams", "databricksParams" ) DatabricksAIModelVersion.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksAIModelVersion.QUERY_USER_COUNT = NumericField( diff --git a/pyatlan_v9/model/assets/databricks_external_location.py b/pyatlan_v9/model/assets/databricks_external_location.py index 92fcafadb..e6a7d64ee 100644 --- a/pyatlan_v9/model/assets/databricks_external_location.py +++ b/pyatlan_v9/model/assets/databricks_external_location.py @@ -40,10 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksExternalLocation, - RelatedDatabricksExternalLocationPath, -) +from .databricks_related import RelatedDatabricksExternalLocationPath from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -130,6 +127,8 @@ class DatabricksExternalLocation(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksExternalLocation" + databricks_url: Union[str, None, UnsetType] = UNSET """URL of the external location.""" @@ -317,68 +316,6 @@ class DatabricksExternalLocation(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksExternalLocation" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksExternalLocation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksExternalLocation validation failed: {errors}") - - def minimize(self) -> "DatabricksExternalLocation": - """ - Return a minimal copy of this DatabricksExternalLocation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksExternalLocation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksExternalLocation instance with only the minimum required fields. - """ - self.validate() - return DatabricksExternalLocation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksExternalLocation": - """ - Create a :class:`RelatedDatabricksExternalLocation` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksExternalLocation reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksExternalLocation(guid=self.guid) - return RelatedDatabricksExternalLocation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -779,9 +716,6 @@ def _databricks_external_location_to_nested( is_incomplete=databricks_external_location.is_incomplete, provenance_type=databricks_external_location.provenance_type, home_id=databricks_external_location.home_id, - depth=databricks_external_location.depth, - immediate_upstream=databricks_external_location.immediate_upstream, - immediate_downstream=databricks_external_location.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -817,6 +751,7 @@ def _databricks_external_location_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -825,9 +760,6 @@ def _databricks_external_location_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_external_location_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/databricks_external_location_path.py b/pyatlan_v9/model/assets/databricks_external_location_path.py 
index 730c1a1fc..3cf4b865e 100644 --- a/pyatlan_v9/model/assets/databricks_external_location_path.py +++ b/pyatlan_v9/model/assets/databricks_external_location_path.py @@ -41,10 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksExternalLocation, - RelatedDatabricksExternalLocationPath, -) +from .databricks_related import RelatedDatabricksExternalLocation from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -132,6 +129,8 @@ class DatabricksExternalLocationPath(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksExternalLocationPath" + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data at the external location.""" @@ -328,76 +327,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksExternalLocationPath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_external_location is UNSET: - errors.append("databricks_external_location is required for creation") - if errors: - raise ValueError( - f"DatabricksExternalLocationPath validation failed: {errors}" - ) - - def minimize(self) -> "DatabricksExternalLocationPath": - """ - Return a minimal copy of this DatabricksExternalLocationPath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksExternalLocationPath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksExternalLocationPath instance with only the minimum required fields. - """ - self.validate() - return DatabricksExternalLocationPath( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksExternalLocationPath": - """ - Create a :class:`RelatedDatabricksExternalLocationPath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksExternalLocationPath reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksExternalLocationPath(guid=self.guid) - return RelatedDatabricksExternalLocationPath(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -805,9 +734,6 @@ def _databricks_external_location_path_to_nested( is_incomplete=databricks_external_location_path.is_incomplete, provenance_type=databricks_external_location_path.provenance_type, home_id=databricks_external_location_path.home_id, - depth=databricks_external_location_path.depth, - immediate_upstream=databricks_external_location_path.immediate_upstream, - immediate_downstream=databricks_external_location_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -843,6 +769,7 @@ def _databricks_external_location_path_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -851,9 +778,6 @@ def _databricks_external_location_path_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_external_location_path_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/databricks_metric_view.py b/pyatlan_v9/model/assets/databricks_metric_view.py index 256376d33..f5442d075 100644 --- a/pyatlan_v9/model/assets/databricks_metric_view.py +++ b/pyatlan_v9/model/assets/databricks_metric_view.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import 
RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksMetricView from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -136,6 +135,8 @@ class DatabricksMetricView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksMetricView" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -345,66 +346,6 @@ class DatabricksMetricView(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksMetricView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksMetricView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksMetricView validation failed: {errors}") - - def minimize(self) -> "DatabricksMetricView": - """ - Return a minimal copy of this DatabricksMetricView with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksMetricView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksMetricView instance with only the minimum required fields. - """ - self.validate() - return DatabricksMetricView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksMetricView": - """ - Create a :class:`RelatedDatabricksMetricView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksMetricView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksMetricView(guid=self.guid) - return RelatedDatabricksMetricView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -841,9 +782,6 @@ def _databricks_metric_view_to_nested( is_incomplete=databricks_metric_view.is_incomplete, provenance_type=databricks_metric_view.provenance_type, home_id=databricks_metric_view.home_id, - depth=databricks_metric_view.depth, - immediate_upstream=databricks_metric_view.immediate_upstream, - immediate_downstream=databricks_metric_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -879,6 +817,7 @@ def _databricks_metric_view_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -887,9 +826,6 @@ def 
_databricks_metric_view_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_metric_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/databricks_notebook.py b/pyatlan_v9/model/assets/databricks_notebook.py index 2ba2e86b8..894d57ce5 100644 --- a/pyatlan_v9/model/assets/databricks_notebook.py +++ b/pyatlan_v9/model/assets/databricks_notebook.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksNotebook from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -70,8 +69,8 @@ class DatabricksNotebook(Asset): Base class for all databricks notebook assets. """ - DATABRICKS_NOTEBOOK_PATH: ClassVar[Any] = None - DATABRICKS_NOTEBOOK_WORKSPACE_ID: ClassVar[Any] = None + DATABRICKS_PATH: ClassVar[Any] = None + DATABRICKS_WORKSPACE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -126,10 +125,12 @@ class DatabricksNotebook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksNotebook" + + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" query_count: Union[int, None, UnsetType] = UNSET @@ -308,66 +309,6 @@ class DatabricksNotebook(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksNotebook" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksNotebook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksNotebook validation failed: {errors}") - - def minimize(self) -> "DatabricksNotebook": - """ - Return a minimal copy of this DatabricksNotebook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksNotebook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksNotebook instance with only the minimum required fields. - """ - self.validate() - return DatabricksNotebook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksNotebook": - """ - Create a :class:`RelatedDatabricksNotebook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksNotebook reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksNotebook(guid=self.guid) - return RelatedDatabricksNotebook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -425,10 +366,10 @@ def from_json( class DatabricksNotebookAttributes(AssetAttributes): """DatabricksNotebook-specific attributes for nested API format.""" - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" query_count: Union[int, None, UnsetType] = UNSET @@ -673,8 +614,8 @@ def _populate_databricks_notebook_attrs( ) -> None: """Populate DatabricksNotebook-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_notebook_path = obj.databricks_notebook_path - attrs.databricks_notebook_workspace_id = obj.databricks_notebook_workspace_id + attrs.databricks_path = obj.databricks_path + attrs.databricks_workspace_id = obj.databricks_workspace_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -698,8 +639,8 @@ def _populate_databricks_notebook_attrs( def _extract_databricks_notebook_attrs(attrs: DatabricksNotebookAttributes) -> dict: """Extract all DatabricksNotebook attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_notebook_path"] = attrs.databricks_notebook_path - result["databricks_notebook_workspace_id"] = 
attrs.databricks_notebook_workspace_id + result["databricks_path"] = attrs.databricks_path + result["databricks_workspace_id"] = attrs.databricks_workspace_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -760,9 +701,6 @@ def _databricks_notebook_to_nested( is_incomplete=databricks_notebook.is_incomplete, provenance_type=databricks_notebook.provenance_type, home_id=databricks_notebook.home_id, - depth=databricks_notebook.depth, - immediate_upstream=databricks_notebook.immediate_upstream, - immediate_downstream=databricks_notebook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -798,6 +736,7 @@ def _databricks_notebook_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -806,9 +745,6 @@ def _databricks_notebook_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_notebook_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -840,11 +776,9 @@ def _databricks_notebook_from_nested_bytes( RelationField, ) -DatabricksNotebook.DATABRICKS_NOTEBOOK_PATH = KeywordField( - "databricksNotebookPath", "databricksNotebookPath" -) -DatabricksNotebook.DATABRICKS_NOTEBOOK_WORKSPACE_ID = KeywordField( - "databricksNotebookWorkspaceId", "databricksNotebookWorkspaceId" +DatabricksNotebook.DATABRICKS_PATH = KeywordField("databricksPath", "databricksPath") +DatabricksNotebook.DATABRICKS_WORKSPACE_ID = KeywordField( + "databricksWorkspaceId", "databricksWorkspaceId" ) 
DatabricksNotebook.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksNotebook.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") diff --git a/pyatlan_v9/model/assets/databricks_related.py b/pyatlan_v9/model/assets/databricks_related.py index bfb81a71e..7b4c15f13 100644 --- a/pyatlan_v9/model/assets/databricks_related.py +++ b/pyatlan_v9/model/assets/databricks_related.py @@ -13,7 +13,6 @@ from typing import Any, Dict, List, Union -import msgspec from msgspec import UNSET, UnsetType from .referenceable_related import RelatedReferenceable @@ -58,13 +57,13 @@ class RelatedDatabricksVolume(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksVolume" so it serializes correctly - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" def __post_init__(self) -> None: @@ -82,13 +81,13 @@ class RelatedDatabricksVolumePath(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksVolumePath" so it serializes correctly - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, 
None, UnsetType] = UNSET """Name of the parent volume.""" def __post_init__(self) -> None: @@ -151,9 +150,7 @@ class RelatedDatabricksAIModelContext(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksAIModelContext" so it serializes correctly - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" def __post_init__(self) -> None: @@ -171,64 +168,40 @@ class RelatedDatabricksAIModelVersion(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksAIModelVersion" so it serializes correctly - databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = 
UNSET """The run end time of the model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" def __post_init__(self) -> None: @@ -261,10 +234,10 @@ class RelatedDatabricksNotebook(RelatedDatabricks): # type_name inherited from parent with default=UNSET # 
__post_init__ sets it to "DatabricksNotebook" so it serializes correctly - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/databricks_volume.py b/pyatlan_v9/model/assets/databricks_volume.py index 4f1d2e519..903193da1 100644 --- a/pyatlan_v9/model/assets/databricks_volume.py +++ b/pyatlan_v9/model/assets/databricks_volume.py @@ -41,7 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksVolume, RelatedDatabricksVolumePath +from .databricks_related import RelatedDatabricksVolumePath from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -72,9 +72,9 @@ class DatabricksVolume(Asset): Represents a Databricks Volume, a storage object for managing and accessing data files within Databricks workspaces. 
""" - DATABRICKS_VOLUME_OWNER: ClassVar[Any] = None - DATABRICKS_VOLUME_EXTERNAL_LOCATION: ClassVar[Any] = None - DATABRICKS_VOLUME_TYPE: ClassVar[Any] = None + DATABRICKS_OWNER: ClassVar[Any] = None + DATABRICKS_EXTERNAL_LOCATION: ClassVar[Any] = None + DATABRICKS_TYPE: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -131,13 +131,15 @@ class DatabricksVolume(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksVolume" + + databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -332,78 +334,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksVolume instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksVolume validation failed: {errors}") - - def minimize(self) -> "DatabricksVolume": - """ - Return a minimal copy of this DatabricksVolume with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksVolume with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksVolume instance with only the minimum required fields. - """ - self.validate() - return DatabricksVolume(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksVolume": - """ - Create a :class:`RelatedDatabricksVolume` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksVolume reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksVolume(guid=self.guid) - return RelatedDatabricksVolume(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -461,13 +391,13 @@ def from_json( class DatabricksVolumeAttributes(AssetAttributes): """DatabricksVolume-specific attributes for nested API format.""" - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -722,9 +652,9 @@ def _populate_databricks_volume_attrs( ) -> None: """Populate DatabricksVolume-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_volume_owner = obj.databricks_volume_owner - attrs.databricks_volume_external_location = obj.databricks_volume_external_location - attrs.databricks_volume_type = obj.databricks_volume_type + attrs.databricks_owner = obj.databricks_owner + attrs.databricks_external_location = obj.databricks_external_location + attrs.databricks_type = obj.databricks_type attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -748,11 +678,9 @@ def _populate_databricks_volume_attrs( def _extract_databricks_volume_attrs(attrs: DatabricksVolumeAttributes) -> dict: """Extract all DatabricksVolume attributes from the attrs struct into a flat dict.""" 
result = _extract_asset_attrs(attrs) - result["databricks_volume_owner"] = attrs.databricks_volume_owner - result["databricks_volume_external_location"] = ( - attrs.databricks_volume_external_location - ) - result["databricks_volume_type"] = attrs.databricks_volume_type + result["databricks_owner"] = attrs.databricks_owner + result["databricks_external_location"] = attrs.databricks_external_location + result["databricks_type"] = attrs.databricks_type result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -813,9 +741,6 @@ def _databricks_volume_to_nested( is_incomplete=databricks_volume.is_incomplete, provenance_type=databricks_volume.provenance_type, home_id=databricks_volume.home_id, - depth=databricks_volume.depth, - immediate_upstream=databricks_volume.immediate_upstream, - immediate_downstream=databricks_volume.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -849,6 +774,7 @@ def _databricks_volume_from_nested(nested: DatabricksVolumeNested) -> Databricks updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -857,9 +783,6 @@ def _databricks_volume_from_nested(nested: DatabricksVolumeNested) -> Databricks is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_volume_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -889,15 +812,11 @@ def _databricks_volume_from_nested_bytes(data: bytes, serde: Serde) -> Databrick RelationField, ) -DatabricksVolume.DATABRICKS_VOLUME_OWNER = KeywordField( - 
"databricksVolumeOwner", "databricksVolumeOwner" -) -DatabricksVolume.DATABRICKS_VOLUME_EXTERNAL_LOCATION = KeywordField( - "databricksVolumeExternalLocation", "databricksVolumeExternalLocation" -) -DatabricksVolume.DATABRICKS_VOLUME_TYPE = KeywordField( - "databricksVolumeType", "databricksVolumeType" +DatabricksVolume.DATABRICKS_OWNER = KeywordField("databricksOwner", "databricksOwner") +DatabricksVolume.DATABRICKS_EXTERNAL_LOCATION = KeywordField( + "databricksExternalLocation", "databricksExternalLocation" ) +DatabricksVolume.DATABRICKS_TYPE = KeywordField("databricksType", "databricksType") DatabricksVolume.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksVolume.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") DatabricksVolume.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") diff --git a/pyatlan_v9/model/assets/databricks_volume_path.py b/pyatlan_v9/model/assets/databricks_volume_path.py index eae26d448..a96686d6c 100644 --- a/pyatlan_v9/model/assets/databricks_volume_path.py +++ b/pyatlan_v9/model/assets/databricks_volume_path.py @@ -41,7 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksVolume, RelatedDatabricksVolumePath +from .databricks_related import RelatedDatabricksVolume from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, @@ -71,9 +71,9 @@ class DatabricksVolumePath(Asset): Represents a path within a Databricks Volume, providing access to specific data files or directories. 
""" - DATABRICKS_VOLUME_PATH_PATH: ClassVar[Any] = None - DATABRICKS_VOLUME_PATH_VOLUME_QUALIFIED_NAME: ClassVar[Any] = None - DATABRICKS_VOLUME_PATH_VOLUME_NAME: ClassVar[Any] = None + DATABRICKS_PATH: ClassVar[Any] = None + DATABRICKS_VOLUME_QUALIFIED_NAME: ClassVar[Any] = None + DATABRICKS_VOLUME_NAME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -129,13 +129,15 @@ class DatabricksVolumePath(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksVolumePath" + + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, None, UnsetType] = UNSET """Name of the parent volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -325,86 +327,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksVolumePath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_volume is UNSET: - errors.append("databricks_volume is required for creation") - if self.databricks_volume_name is UNSET: - errors.append("databricks_volume_name is required for creation") - if self.databricks_volume_qualified_name is UNSET: - errors.append( - "databricks_volume_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksVolumePath validation failed: {errors}") - - def minimize(self) -> "DatabricksVolumePath": - """ - Return a minimal copy of this DatabricksVolumePath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksVolumePath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksVolumePath instance with only the minimum required fields. 
- """ - self.validate() - return DatabricksVolumePath(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksVolumePath": - """ - Create a :class:`RelatedDatabricksVolumePath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksVolumePath reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksVolumePath(guid=self.guid) - return RelatedDatabricksVolumePath(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -462,13 +384,13 @@ def from_json( class DatabricksVolumePathAttributes(AssetAttributes): """DatabricksVolumePath-specific attributes for nested API format.""" - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, None, UnsetType] = UNSET """Name of the parent volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -717,11 +639,9 @@ def _populate_databricks_volume_path_attrs( ) -> None: """Populate DatabricksVolumePath-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_volume_path_path = obj.databricks_volume_path_path - attrs.databricks_volume_path_volume_qualified_name = ( - obj.databricks_volume_path_volume_qualified_name - ) - attrs.databricks_volume_path_volume_name = 
obj.databricks_volume_path_volume_name + attrs.databricks_path = obj.databricks_path + attrs.databricks_volume_qualified_name = obj.databricks_volume_qualified_name + attrs.databricks_volume_name = obj.databricks_volume_name attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -747,13 +667,9 @@ def _extract_databricks_volume_path_attrs( ) -> dict: """Extract all DatabricksVolumePath attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_volume_path_path"] = attrs.databricks_volume_path_path - result["databricks_volume_path_volume_qualified_name"] = ( - attrs.databricks_volume_path_volume_qualified_name - ) - result["databricks_volume_path_volume_name"] = ( - attrs.databricks_volume_path_volume_name - ) + result["databricks_path"] = attrs.databricks_path + result["databricks_volume_qualified_name"] = attrs.databricks_volume_qualified_name + result["databricks_volume_name"] = attrs.databricks_volume_name result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -814,9 +730,6 @@ def _databricks_volume_path_to_nested( is_incomplete=databricks_volume_path.is_incomplete, provenance_type=databricks_volume_path.provenance_type, home_id=databricks_volume_path.home_id, - depth=databricks_volume_path.depth, - immediate_upstream=databricks_volume_path.immediate_upstream, - immediate_downstream=databricks_volume_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -852,6 +765,7 @@ def _databricks_volume_path_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -860,9 
+774,6 @@ def _databricks_volume_path_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_volume_path_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -894,14 +805,12 @@ def _databricks_volume_path_from_nested_bytes( RelationField, ) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_PATH = KeywordField( - "databricksVolumePathPath", "databricksVolumePathPath" -) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_VOLUME_QUALIFIED_NAME = KeywordField( - "databricksVolumePathVolumeQualifiedName", "databricksVolumePathVolumeQualifiedName" +DatabricksVolumePath.DATABRICKS_PATH = KeywordField("databricksPath", "databricksPath") +DatabricksVolumePath.DATABRICKS_VOLUME_QUALIFIED_NAME = KeywordField( + "databricksVolumeQualifiedName", "databricksVolumeQualifiedName" ) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_VOLUME_NAME = KeywordField( - "databricksVolumePathVolumeName", "databricksVolumePathVolumeName" +DatabricksVolumePath.DATABRICKS_VOLUME_NAME = KeywordField( + "databricksVolumeName", "databricksVolumeName" ) DatabricksVolumePath.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksVolumePath.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") diff --git a/pyatlan_v9/model/assets/dataverse.py b/pyatlan_v9/model/assets/dataverse.py index a299d8b60..9903d27b5 100644 --- a/pyatlan_v9/model/assets/dataverse.py +++ b/pyatlan_v9/model/assets/dataverse.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverse from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 
+93,8 @@ class Dataverse(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dataverse" + dataverse_is_custom: Union[bool, None, UnsetType] = UNSET """Indicator if DataverseEntity is custom built.""" @@ -198,66 +199,6 @@ class Dataverse(Asset): def __post_init__(self) -> None: self.type_name = "Dataverse" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dataverse instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dataverse validation failed: {errors}") - - def minimize(self) -> "Dataverse": - """ - Return a minimal copy of this Dataverse with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dataverse with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Dataverse instance with only the minimum required fields. 
- """ - self.validate() - return Dataverse(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverse": - """ - Create a :class:`RelatedDataverse` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverse reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataverse(guid=self.guid) - return RelatedDataverse(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -519,9 +460,6 @@ def _dataverse_to_nested(dataverse: Dataverse) -> DataverseNested: is_incomplete=dataverse.is_incomplete, provenance_type=dataverse.provenance_type, home_id=dataverse.home_id, - depth=dataverse.depth, - immediate_upstream=dataverse.immediate_upstream, - immediate_downstream=dataverse.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -553,6 +491,7 @@ def _dataverse_from_nested(nested: DataverseNested) -> Dataverse: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -561,9 +500,6 @@ def _dataverse_from_nested(nested: DataverseNested) -> Dataverse: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dataverse_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/dataverse_attribute.py b/pyatlan_v9/model/assets/dataverse_attribute.py index 7545b49fe..37736c7cf 100644 --- a/pyatlan_v9/model/assets/dataverse_attribute.py +++ b/pyatlan_v9/model/assets/dataverse_attribute.py @@ -41,7 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverseAttribute, RelatedDataverseEntity +from .dataverse_related import RelatedDataverseEntity from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -102,6 +102,8 @@ class DataverseAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataverseAttribute" + dataverse_entity_qualified_name: Union[str, None, UnsetType] = UNSET """Entity Qualified Name of the DataverseAttribute.""" @@ -230,76 +232,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataverseAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dataverse_entity is UNSET: - errors.append("dataverse_entity is required for creation") - if self.dataverse_entity_qualified_name is UNSET: - errors.append( - "dataverse_entity_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"DataverseAttribute validation failed: {errors}") - - def minimize(self) -> "DataverseAttribute": - """ - Return a minimal copy of this DataverseAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataverseAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataverseAttribute instance with only the minimum required fields. - """ - self.validate() - return DataverseAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverseAttribute": - """ - Create a :class:`RelatedDataverseAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverseAttribute reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataverseAttribute(guid=self.guid) - return RelatedDataverseAttribute(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -654,9 +586,6 @@ def _dataverse_attribute_to_nested( is_incomplete=dataverse_attribute.is_incomplete, provenance_type=dataverse_attribute.provenance_type, home_id=dataverse_attribute.home_id, - depth=dataverse_attribute.depth, - immediate_upstream=dataverse_attribute.immediate_upstream, - immediate_downstream=dataverse_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +621,7 @@ def _dataverse_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -700,9 +630,6 @@ def _dataverse_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dataverse_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dataverse_entity.py b/pyatlan_v9/model/assets/dataverse_entity.py index fc86cc43a..912ff96f9 100644 --- a/pyatlan_v9/model/assets/dataverse_entity.py +++ b/pyatlan_v9/model/assets/dataverse_entity.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverseAttribute, RelatedDataverseEntity +from .dataverse_related import RelatedDataverseAttribute from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +98,8 @@ class DataverseEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataverseEntity" + dataverse_entity_schema_name: Union[str, None, UnsetType] = UNSET """Schema Name of the DataverseEntity.""" @@ -213,66 +215,6 @@ class DataverseEntity(Asset): def __post_init__(self) -> None: self.type_name = "DataverseEntity" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataverseEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataverseEntity validation failed: {errors}") - - def minimize(self) -> "DataverseEntity": - """ - Return a minimal copy of this DataverseEntity with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataverseEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataverseEntity instance with only the minimum required fields. - """ - self.validate() - return DataverseEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverseEntity": - """ - Create a :class:`RelatedDataverseEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverseEntity reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataverseEntity(guid=self.guid) - return RelatedDataverseEntity(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -596,9 +538,6 @@ def _dataverse_entity_to_nested( is_incomplete=dataverse_entity.is_incomplete, provenance_type=dataverse_entity.provenance_type, home_id=dataverse_entity.home_id, - depth=dataverse_entity.depth, - immediate_upstream=dataverse_entity.immediate_upstream, - immediate_downstream=dataverse_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -632,6 +571,7 @@ def _dataverse_entity_from_nested(nested: DataverseEntityNested) -> DataverseEnt updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -640,9 +580,6 @@ def _dataverse_entity_from_nested(nested: DataverseEntityNested) -> DataverseEnt is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dataverse_entity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt.py b/pyatlan_v9/model/assets/dbt.py index f7ae0b3a0..724208939 100644 --- a/pyatlan_v9/model/assets/dbt.py +++ b/pyatlan_v9/model/assets/dbt.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbt from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -110,6 +109,8 @@ class Dbt(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dbt" + dbt_alias: Union[str, None, UnsetType] = UNSET """Alias of this asset in dbt.""" @@ -262,66 +263,6 @@ class Dbt(Asset): def __post_init__(self) -> None: self.type_name = "Dbt" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dbt instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dbt validation failed: {errors}") - - def minimize(self) -> "Dbt": - """ - Return a minimal copy of this Dbt with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dbt with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Dbt instance with only the minimum required fields. - """ - self.validate() - return Dbt(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbt": - """ - Create a :class:`RelatedDbt` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbt reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbt(guid=self.guid) - return RelatedDbt(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -659,9 +600,6 @@ def _dbt_to_nested(dbt: Dbt) -> DbtNested: is_incomplete=dbt.is_incomplete, provenance_type=dbt.provenance_type, home_id=dbt.home_id, - depth=dbt.depth, - immediate_upstream=dbt.immediate_upstream, - immediate_downstream=dbt.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -691,6 +629,7 @@ def _dbt_from_nested(nested: DbtNested) -> Dbt: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -699,9 +638,6 @@ def _dbt_from_nested(nested: DbtNested) -> Dbt: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_column_process.py b/pyatlan_v9/model/assets/dbt_column_process.py index 23ba9c204..f1dac414f 100644 --- a/pyatlan_v9/model/assets/dbt_column_process.py +++ b/pyatlan_v9/model/assets/dbt_column_process.py @@ -42,7 +42,6 @@ from .catalog_related import RelatedCatalog from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtColumnProcess from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from 
.flow_related import RelatedFlowControlOperation @@ -140,6 +139,8 @@ class DbtColumnProcess(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtColumnProcess" + dbt_column_process_job_status: Union[str, None, UnsetType] = UNSET """Status of the dbt column process job.""" @@ -357,66 +358,6 @@ class DbtColumnProcess(Asset): def __post_init__(self) -> None: self.type_name = "DbtColumnProcess" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtColumnProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtColumnProcess validation failed: {errors}") - - def minimize(self) -> "DbtColumnProcess": - """ - Return a minimal copy of this DbtColumnProcess with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtColumnProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtColumnProcess instance with only the minimum required fields. - """ - self.validate() - return DbtColumnProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtColumnProcess": - """ - Create a :class:`RelatedDbtColumnProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtColumnProcess reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtColumnProcess(guid=self.guid) - return RelatedDbtColumnProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -865,9 +806,6 @@ def _dbt_column_process_to_nested( is_incomplete=dbt_column_process.is_incomplete, provenance_type=dbt_column_process.provenance_type, home_id=dbt_column_process.home_id, - depth=dbt_column_process.depth, - immediate_upstream=dbt_column_process.immediate_upstream, - immediate_downstream=dbt_column_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -901,6 +839,7 @@ def _dbt_column_process_from_nested(nested: DbtColumnProcessNested) -> DbtColumn updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -909,9 +848,6 @@ def _dbt_column_process_from_nested(nested: 
DbtColumnProcessNested) -> DbtColumn is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_column_process_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_dimension.py b/pyatlan_v9/model/assets/dbt_dimension.py index d26be7b0f..280c6e092 100644 --- a/pyatlan_v9/model/assets/dbt_dimension.py +++ b/pyatlan_v9/model/assets/dbt_dimension.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtDimension from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -121,6 +120,8 @@ class DbtDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtDimension" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this dimension belongs to.""" @@ -303,66 +304,6 @@ class DbtDimension(Asset): def __post_init__(self) -> None: self.type_name = "DbtDimension" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtDimension validation failed: {errors}") - - def minimize(self) -> "DbtDimension": - """ - Return a minimal copy of this DbtDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtDimension instance with only the minimum required fields. - """ - self.validate() - return DbtDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtDimension": - """ - Create a :class:`RelatedDbtDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtDimension(guid=self.guid) - return RelatedDbtDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -761,9 +702,6 @@ def _dbt_dimension_to_nested(dbt_dimension: DbtDimension) -> DbtDimensionNested: is_incomplete=dbt_dimension.is_incomplete, provenance_type=dbt_dimension.provenance_type, home_id=dbt_dimension.home_id, - depth=dbt_dimension.depth, - immediate_upstream=dbt_dimension.immediate_upstream, - immediate_downstream=dbt_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -797,6 +735,7 @@ def _dbt_dimension_from_nested(nested: DbtDimensionNested) -> DbtDimension: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -805,9 +744,6 @@ def _dbt_dimension_from_nested(nested: DbtDimensionNested) -> DbtDimension: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_entity.py b/pyatlan_v9/model/assets/dbt_entity.py index 681a425f1..1fc8879e7 100644 --- a/pyatlan_v9/model/assets/dbt_entity.py +++ b/pyatlan_v9/model/assets/dbt_entity.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtEntity from 
.gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -120,6 +119,8 @@ class DbtEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtEntity" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this entity belongs to.""" @@ -299,66 +300,6 @@ class DbtEntity(Asset): def __post_init__(self) -> None: self.type_name = "DbtEntity" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtEntity validation failed: {errors}") - - def minimize(self) -> "DbtEntity": - """ - Return a minimal copy of this DbtEntity with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtEntity instance with only the minimum required fields. - """ - self.validate() - return DbtEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtEntity": - """ - Create a :class:`RelatedDbtEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtEntity reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtEntity(guid=self.guid) - return RelatedDbtEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -746,9 +687,6 @@ def _dbt_entity_to_nested(dbt_entity: DbtEntity) -> DbtEntityNested: is_incomplete=dbt_entity.is_incomplete, provenance_type=dbt_entity.provenance_type, home_id=dbt_entity.home_id, - depth=dbt_entity.depth, - immediate_upstream=dbt_entity.immediate_upstream, - immediate_downstream=dbt_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -780,6 +718,7 @@ def _dbt_entity_from_nested(nested: DbtEntityNested) -> DbtEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -788,9 +727,6 @@ def _dbt_entity_from_nested(nested: DbtEntityNested) -> DbtEntity: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_entity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_measure.py b/pyatlan_v9/model/assets/dbt_measure.py index ee6aa9e25..fbe9d8f67 100644 --- a/pyatlan_v9/model/assets/dbt_measure.py +++ b/pyatlan_v9/model/assets/dbt_measure.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtMeasure from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -120,6 +119,8 @@ class DbtMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtMeasure" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this measure belongs to.""" @@ -299,66 +300,6 @@ class DbtMeasure(Asset): def __post_init__(self) -> None: self.type_name = "DbtMeasure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtMeasure validation failed: {errors}") - - def minimize(self) -> "DbtMeasure": - """ - Return a minimal copy of this DbtMeasure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtMeasure instance with only the minimum required fields. - """ - self.validate() - return DbtMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtMeasure": - """ - Create a :class:`RelatedDbtMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtMeasure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtMeasure(guid=self.guid) - return RelatedDbtMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -746,9 +687,6 @@ def _dbt_measure_to_nested(dbt_measure: DbtMeasure) -> DbtMeasureNested: is_incomplete=dbt_measure.is_incomplete, provenance_type=dbt_measure.provenance_type, home_id=dbt_measure.home_id, - depth=dbt_measure.depth, - immediate_upstream=dbt_measure.immediate_upstream, - immediate_downstream=dbt_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -780,6 +718,7 @@ def _dbt_measure_from_nested(nested: DbtMeasureNested) -> DbtMeasure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -788,9 +727,6 @@ def _dbt_measure_from_nested(nested: DbtMeasureNested) -> DbtMeasure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_measure_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_metric.py b/pyatlan_v9/model/assets/dbt_metric.py index 6b8b2b1d2..c564c70b2 100644 --- a/pyatlan_v9/model/assets/dbt_metric.py +++ b/pyatlan_v9/model/assets/dbt_metric.py @@ -41,7 +41,7 @@ from .asset_related import RelatedAsset from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtMetric, 
RelatedDbtModel +from .dbt_related import RelatedDbtModel from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -128,6 +128,8 @@ class DbtMetric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtMetric" + dbt_metric_filters: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Filters applied to the dbt metric.""" @@ -327,66 +329,6 @@ class DbtMetric(Asset): def __post_init__(self) -> None: self.type_name = "DbtMetric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtMetric validation failed: {errors}") - - def minimize(self) -> "DbtMetric": - """ - Return a minimal copy of this DbtMetric with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtMetric instance with only the minimum required fields. - """ - self.validate() - return DbtMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtMetric": - """ - Create a :class:`RelatedDbtMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtMetric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtMetric(guid=self.guid) - return RelatedDbtMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -802,9 +744,6 @@ def _dbt_metric_to_nested(dbt_metric: DbtMetric) -> DbtMetricNested: is_incomplete=dbt_metric.is_incomplete, provenance_type=dbt_metric.provenance_type, home_id=dbt_metric.home_id, - depth=dbt_metric.depth, - immediate_upstream=dbt_metric.immediate_upstream, - immediate_downstream=dbt_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -836,6 +775,7 @@ def _dbt_metric_from_nested(nested: DbtMetricNested) -> DbtMetric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -844,9 +784,6 @@ def _dbt_metric_from_nested(nested: DbtMetricNested) -> DbtMetric: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_metric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_model.py b/pyatlan_v9/model/assets/dbt_model.py index ab94f6255..44bebe06b 100644 --- a/pyatlan_v9/model/assets/dbt_model.py +++ b/pyatlan_v9/model/assets/dbt_model.py @@ -41,12 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtMetric, - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtTest, -) +from .dbt_related import RelatedDbtMetric, RelatedDbtModelColumn, RelatedDbtTest from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -136,6 +131,8 @@ class DbtModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtModel" + dbt_status: Union[str, None, UnsetType] = UNSET """Status of the dbt model.""" @@ -352,72 +349,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_model_sql_assets is UNSET: - errors.append("dbt_model_sql_assets is required for creation") - if errors: - raise ValueError(f"DbtModel validation failed: {errors}") - - def minimize(self) -> "DbtModel": - """ - Return a minimal copy of this DbtModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtModel instance with only the minimum required fields. - """ - self.validate() - return DbtModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtModel": - """ - Create a :class:`RelatedDbtModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtModel(guid=self.guid) - return RelatedDbtModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -848,9 +779,6 @@ def _dbt_model_to_nested(dbt_model: DbtModel) -> DbtModelNested: is_incomplete=dbt_model.is_incomplete, provenance_type=dbt_model.provenance_type, home_id=dbt_model.home_id, - depth=dbt_model.depth, - immediate_upstream=dbt_model.immediate_upstream, - immediate_downstream=dbt_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -882,6 +810,7 @@ def _dbt_model_from_nested(nested: DbtModelNested) -> DbtModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -890,9 +819,6 @@ def _dbt_model_from_nested(nested: DbtModelNested) -> DbtModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_model_column.py b/pyatlan_v9/model/assets/dbt_model_column.py index e73075872..45c40afc6 100644 --- a/pyatlan_v9/model/assets/dbt_model_column.py +++ b/pyatlan_v9/model/assets/dbt_model_column.py @@ -40,12 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtSeed, - 
RelatedDbtTest, -) +from .dbt_related import RelatedDbtModel, RelatedDbtSeed, RelatedDbtTest from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -125,6 +120,8 @@ class DbtModelColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtModelColumn" + dbt_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt model this column belongs to.""" @@ -309,74 +306,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtModelColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_model is UNSET: - errors.append("dbt_model is required for creation") - if self.dbt_model_qualified_name is UNSET: - errors.append("dbt_model_qualified_name is required for creation") - if errors: - raise ValueError(f"DbtModelColumn validation failed: {errors}") - - def minimize(self) -> "DbtModelColumn": - """ - Return a minimal copy of this DbtModelColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtModelColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtModelColumn instance with only the minimum required fields. - """ - self.validate() - return DbtModelColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtModelColumn": - """ - Create a :class:`RelatedDbtModelColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtModelColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtModelColumn(guid=self.guid) - return RelatedDbtModelColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -761,9 +690,6 @@ def _dbt_model_column_to_nested( is_incomplete=dbt_model_column.is_incomplete, provenance_type=dbt_model_column.provenance_type, home_id=dbt_model_column.home_id, - depth=dbt_model_column.depth, - immediate_upstream=dbt_model_column.immediate_upstream, - immediate_downstream=dbt_model_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -797,6 +723,7 @@ def _dbt_model_column_from_nested(nested: DbtModelColumnNested) -> DbtModelColum updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -805,9 +732,6 @@ def _dbt_model_column_from_nested(nested: DbtModelColumnNested) -> DbtModelColum is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_model_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_process.py b/pyatlan_v9/model/assets/dbt_process.py index 1f4b7becb..16e5efa5a 100644 --- a/pyatlan_v9/model/assets/dbt_process.py +++ b/pyatlan_v9/model/assets/dbt_process.py @@ -42,7 +42,6 @@ from .catalog_related import RelatedCatalog from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related 
import RelatedDbtProcess from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation @@ -140,6 +139,8 @@ class DbtProcess(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtProcess" + dbt_process_job_status: Union[str, None, UnsetType] = UNSET """Status of the dbt process job.""" @@ -357,66 +358,6 @@ class DbtProcess(Asset): def __post_init__(self) -> None: self.type_name = "DbtProcess" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtProcess validation failed: {errors}") - - def minimize(self) -> "DbtProcess": - """ - Return a minimal copy of this DbtProcess with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtProcess instance with only the minimum required fields. - """ - self.validate() - return DbtProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtProcess": - """ - Create a :class:`RelatedDbtProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtProcess reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtProcess(guid=self.guid) - return RelatedDbtProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -856,9 +797,6 @@ def _dbt_process_to_nested(dbt_process: DbtProcess) -> DbtProcessNested: is_incomplete=dbt_process.is_incomplete, provenance_type=dbt_process.provenance_type, home_id=dbt_process.home_id, - depth=dbt_process.depth, - immediate_upstream=dbt_process.immediate_upstream, - immediate_downstream=dbt_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -890,6 +828,7 @@ def _dbt_process_from_nested(nested: DbtProcessNested) -> DbtProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -898,9 +837,6 @@ def _dbt_process_from_nested(nested: DbtProcessNested) -> DbtProcess: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_process_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_seed.py b/pyatlan_v9/model/assets/dbt_seed.py index ef5944bbd..3d9514833 100644 --- a/pyatlan_v9/model/assets/dbt_seed.py +++ b/pyatlan_v9/model/assets/dbt_seed.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtModelColumn, RelatedDbtSeed +from .dbt_related import RelatedDbtModelColumn from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -116,6 +116,8 @@ class DbtSeed(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSeed" + dbt_seed_file_path: Union[str, None, UnsetType] = UNSET """File path of the dbt seed.""" @@ -286,72 +288,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSeed instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_seed_sql_assets is UNSET: - errors.append("dbt_seed_sql_assets is required for creation") - if errors: - raise ValueError(f"DbtSeed validation failed: {errors}") - - def minimize(self) -> "DbtSeed": - """ - Return a minimal copy of this DbtSeed with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSeed with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSeed instance with only the minimum required fields. - """ - self.validate() - return DbtSeed(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSeed": - """ - Create a :class:`RelatedDbtSeed` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSeed reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtSeed(guid=self.guid) - return RelatedDbtSeed(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -711,9 +647,6 @@ def _dbt_seed_to_nested(dbt_seed: DbtSeed) -> DbtSeedNested: is_incomplete=dbt_seed.is_incomplete, provenance_type=dbt_seed.provenance_type, home_id=dbt_seed.home_id, - depth=dbt_seed.depth, - immediate_upstream=dbt_seed.immediate_upstream, - immediate_downstream=dbt_seed.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -743,6 +676,7 @@ def _dbt_seed_from_nested(nested: DbtSeedNested) -> DbtSeed: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -751,9 +685,6 @@ def _dbt_seed_from_nested(nested: DbtSeedNested) -> DbtSeed: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_seed_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_semantic_model.py b/pyatlan_v9/model/assets/dbt_semantic_model.py index 645cbf2e5..2c45cf456 100644 --- a/pyatlan_v9/model/assets/dbt_semantic_model.py +++ b/pyatlan_v9/model/assets/dbt_semantic_model.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtSemanticModel from .gtc_related import RelatedAtlasGlossaryTerm from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -118,6 +117,8 @@ class DbtSemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSemanticModel" + dbt_alias: Union[str, None, UnsetType] = UNSET """Alias of this asset in dbt.""" @@ -279,66 +280,6 @@ class DbtSemanticModel(Asset): def __post_init__(self) -> None: self.type_name = "DbtSemanticModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtSemanticModel validation failed: {errors}") - - def minimize(self) -> "DbtSemanticModel": - """ - Return a minimal copy of this DbtSemanticModel with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSemanticModel instance with only the minimum required fields. - """ - self.validate() - return DbtSemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSemanticModel": - """ - Create a :class:`RelatedDbtSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSemanticModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtSemanticModel(guid=self.guid) - return RelatedDbtSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -702,9 +643,6 @@ def _dbt_semantic_model_to_nested( is_incomplete=dbt_semantic_model.is_incomplete, provenance_type=dbt_semantic_model.provenance_type, home_id=dbt_semantic_model.home_id, - depth=dbt_semantic_model.depth, - immediate_upstream=dbt_semantic_model.immediate_upstream, - immediate_downstream=dbt_semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -738,6 +676,7 @@ def _dbt_semantic_model_from_nested(nested: DbtSemanticModelNested) -> DbtSemant updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -746,9 +685,6 @@ def _dbt_semantic_model_from_nested(nested: 
DbtSemanticModelNested) -> DbtSemant is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_source.py b/pyatlan_v9/model/assets/dbt_source.py index 3e87e62c3..9b2c8afb5 100644 --- a/pyatlan_v9/model/assets/dbt_source.py +++ b/pyatlan_v9/model/assets/dbt_source.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtSource, RelatedDbtTest +from .dbt_related import RelatedDbtTest from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -117,6 +117,8 @@ class DbtSource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSource" + dbt_state: Union[str, None, UnsetType] = UNSET """State of the dbt source.""" @@ -290,72 +292,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sql_assets is UNSET: - errors.append("sql_assets is required for creation") - if errors: - raise ValueError(f"DbtSource validation failed: {errors}") - - def minimize(self) -> "DbtSource": - """ - Return a minimal copy of this DbtSource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSource instance with only the minimum required fields. - """ - self.validate() - return DbtSource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSource": - """ - Create a :class:`RelatedDbtSource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtSource(guid=self.guid) - return RelatedDbtSource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -719,9 +655,6 @@ def _dbt_source_to_nested(dbt_source: DbtSource) -> DbtSourceNested: is_incomplete=dbt_source.is_incomplete, provenance_type=dbt_source.provenance_type, home_id=dbt_source.home_id, - depth=dbt_source.depth, - immediate_upstream=dbt_source.immediate_upstream, - immediate_downstream=dbt_source.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -753,6 +686,7 @@ def _dbt_source_from_nested(nested: DbtSourceNested) -> DbtSource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -761,9 +695,6 @@ def _dbt_source_from_nested(nested: DbtSourceNested) -> DbtSource: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_source_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_tag.py b/pyatlan_v9/model/assets/dbt_tag.py index 30c38955a..7d60b6388 100644 --- a/pyatlan_v9/model/assets/dbt_tag.py +++ b/pyatlan_v9/model/assets/dbt_tag.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtTag from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import 
RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -115,6 +114,8 @@ class DbtTag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtTag" + dbt_alias: Union[str, None, UnsetType] = UNSET """Alias of this asset in dbt.""" @@ -287,74 +288,6 @@ def __post_init__(self) -> None: r"^.+/account/[^/]+/project/[^/]+/tag/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtTag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"DbtTag validation failed: {errors}") - - def minimize(self) -> "DbtTag": - """ - Return a minimal copy of this DbtTag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtTag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtTag instance with only the minimum required fields. - """ - self.validate() - return DbtTag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtTag": - """ - Create a :class:`RelatedDbtTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtTag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtTag(guid=self.guid) - return RelatedDbtTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -716,9 +649,6 @@ def _dbt_tag_to_nested(dbt_tag: DbtTag) -> DbtTagNested: is_incomplete=dbt_tag.is_incomplete, provenance_type=dbt_tag.provenance_type, home_id=dbt_tag.home_id, - depth=dbt_tag.depth, - immediate_upstream=dbt_tag.immediate_upstream, - immediate_downstream=dbt_tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -748,6 +678,7 @@ def _dbt_tag_from_nested(nested: DbtTagNested) -> DbtTag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -756,9 +687,6 @@ def _dbt_tag_from_nested(nested: DbtTagNested) -> DbtTag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_tag_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dbt_test.py b/pyatlan_v9/model/assets/dbt_test.py index 86a40d944..0298fb239 100644 --- a/pyatlan_v9/model/assets/dbt_test.py +++ b/pyatlan_v9/model/assets/dbt_test.py @@ -40,12 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtSource, - RelatedDbtTest, -) +from .dbt_related import RelatedDbtModel, 
RelatedDbtModelColumn, RelatedDbtSource from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -129,6 +124,8 @@ class DbtTest(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtTest" + dbt_test_status: Union[str, None, UnsetType] = UNSET """Details of the results of the test. For errors, it reads "ERROR".""" @@ -321,66 +318,6 @@ class DbtTest(Asset): def __post_init__(self) -> None: self.type_name = "DbtTest" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtTest instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtTest validation failed: {errors}") - - def minimize(self) -> "DbtTest": - """ - Return a minimal copy of this DbtTest with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtTest with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtTest instance with only the minimum required fields. - """ - self.validate() - return DbtTest(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtTest": - """ - Create a :class:`RelatedDbtTest` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtTest reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtTest(guid=self.guid) - return RelatedDbtTest(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -782,9 +719,6 @@ def _dbt_test_to_nested(dbt_test: DbtTest) -> DbtTestNested: is_incomplete=dbt_test.is_incomplete, provenance_type=dbt_test.provenance_type, home_id=dbt_test.home_id, - depth=dbt_test.depth, - immediate_upstream=dbt_test.immediate_upstream, - immediate_downstream=dbt_test.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -814,6 +748,7 @@ def _dbt_test_from_nested(nested: DbtTestNested) -> DbtTest: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -822,9 +757,6 @@ def _dbt_test_from_nested(nested: DbtTestNested) -> DbtTest: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_test_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/document_db.py b/pyatlan_v9/model/assets/document_db.py index 15f02971f..eb8517884 100644 --- a/pyatlan_v9/model/assets/document_db.py +++ b/pyatlan_v9/model/assets/document_db.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .document_db_related import RelatedDocumentDB from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +92,8 @@ class DocumentDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDB" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -193,66 +194,6 @@ class DocumentDB(Asset): def __post_init__(self) -> None: self.type_name = "DocumentDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DocumentDB validation failed: {errors}") - - def minimize(self) -> "DocumentDB": - """ - Return a minimal copy of this DocumentDB with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDB instance with only the minimum required fields. - """ - self.validate() - return DocumentDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDB": - """ - Create a :class:`RelatedDocumentDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDB reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDocumentDB(guid=self.guid) - return RelatedDocumentDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +447,6 @@ def _document_db_to_nested(document_db: DocumentDB) -> DocumentDBNested: is_incomplete=document_db.is_incomplete, provenance_type=document_db.provenance_type, home_id=document_db.home_id, - depth=document_db.depth, - immediate_upstream=document_db.immediate_upstream, - immediate_downstream=document_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -540,6 +478,7 @@ def _document_db_from_nested(nested: DocumentDBNested) -> DocumentDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -548,9 +487,6 @@ def _document_db_from_nested(nested: DocumentDBNested) -> DocumentDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_document_db_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/document_db_collection.py b/pyatlan_v9/model/assets/document_db_collection.py index 3a026b3b6..e7dbc1a69 100644 --- a/pyatlan_v9/model/assets/document_db_collection.py +++ b/pyatlan_v9/model/assets/document_db_collection.py @@ -48,7 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .document_db_related import RelatedDocumentDBCollection, RelatedDocumentDBDatabase +from .document_db_related import 
RelatedDocumentDBDatabase from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -180,6 +180,8 @@ class DocumentDBCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDBCollection" + document_db_collection_subtype: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="documentDBCollectionSubtype" ) @@ -531,76 +533,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.document_db_database is UNSET: - errors.append("document_db_database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DocumentDBCollection validation failed: {errors}") - - def minimize(self) -> "DocumentDBCollection": - """ - Return a minimal copy of this DocumentDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDBCollection instance with only the minimum required fields. - """ - self.validate() - return DocumentDBCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDBCollection": - """ - Create a :class:`RelatedDocumentDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDBCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDocumentDBCollection(guid=self.guid) - return RelatedDocumentDBCollection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -1319,9 +1251,6 @@ def _document_db_collection_to_nested( is_incomplete=document_db_collection.is_incomplete, provenance_type=document_db_collection.provenance_type, home_id=document_db_collection.home_id, - depth=document_db_collection.depth, - immediate_upstream=document_db_collection.immediate_upstream, - immediate_downstream=document_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1357,6 +1286,7 @@ def _document_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1365,9 +1295,6 @@ def _document_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_document_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/document_db_database.py b/pyatlan_v9/model/assets/document_db_database.py index 75bcd2751..602ad73a7 100644 --- a/pyatlan_v9/model/assets/document_db_database.py +++ b/pyatlan_v9/model/assets/document_db_database.py @@ -47,7 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .document_db_related import RelatedDocumentDBCollection, RelatedDocumentDBDatabase +from .document_db_related import RelatedDocumentDBCollection from .fabric_related import RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, 
RelatedModelEntity @@ -133,6 +133,8 @@ class DocumentDBDatabase(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDBDatabase" + document_db_database_collection_count: Union[int, None, UnsetType] = msgspec.field( default=UNSET, name="documentDBDatabaseCollectionCount" ) @@ -333,66 +335,6 @@ class DocumentDBDatabase(Asset): def __post_init__(self) -> None: self.type_name = "DocumentDBDatabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DocumentDBDatabase validation failed: {errors}") - - def minimize(self) -> "DocumentDBDatabase": - """ - Return a minimal copy of this DocumentDBDatabase with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDBDatabase instance with only the minimum required fields. - """ - self.validate() - return DocumentDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDBDatabase": - """ - Create a :class:`RelatedDocumentDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDBDatabase reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDocumentDBDatabase(guid=self.guid) - return RelatedDocumentDBDatabase(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -846,9 +788,6 @@ def _document_db_database_to_nested( is_incomplete=document_db_database.is_incomplete, provenance_type=document_db_database.provenance_type, home_id=document_db_database.home_id, - depth=document_db_database.depth, - immediate_upstream=document_db_database.immediate_upstream, - immediate_downstream=document_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -884,6 +823,7 @@ def _document_db_database_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -892,9 +832,6 @@ def _document_db_database_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_document_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/domo.py b/pyatlan_v9/model/assets/domo.py index c1f6f0c45..98f868821 100644 --- a/pyatlan_v9/model/assets/domo.py +++ b/pyatlan_v9/model/assets/domo.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomo from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +92,8 @@ class Domo(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Domo" + domo_id: Union[str, None, UnsetType] = UNSET """Id of the Domo dataset.""" @@ -194,66 +195,6 @@ class Domo(Asset): def __post_init__(self) -> None: self.type_name = "Domo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Domo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Domo validation failed: {errors}") - - def minimize(self) -> "Domo": - """ - Return a minimal copy of this Domo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Domo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Domo instance with only the minimum required fields. - """ - self.validate() - return Domo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomo": - """ - Create a :class:`RelatedDomo` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomo reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomo(guid=self.guid) - return RelatedDomo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +447,6 @@ def _domo_to_nested(domo: Domo) -> DomoNested: is_incomplete=domo.is_incomplete, provenance_type=domo.provenance_type, home_id=domo.home_id, - depth=domo.depth, - immediate_upstream=domo.immediate_upstream, - immediate_downstream=domo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -538,6 +476,7 @@ def _domo_from_nested(nested: DomoNested) -> Domo: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -546,9 +485,6 @@ def _domo_from_nested(nested: DomoNested) -> Domo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/domo_card.py b/pyatlan_v9/model/assets/domo_card.py index ff9ad181e..f190b94e6 100644 --- a/pyatlan_v9/model/assets/domo_card.py +++ b/pyatlan_v9/model/assets/domo_card.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomoCard, RelatedDomoDashboard, RelatedDomoDataset +from .domo_related import RelatedDomoDashboard, RelatedDomoDataset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related 
import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +99,8 @@ class DomoCard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoCard" + domo_card_type: Union[str, None, UnsetType] = UNSET """Type of the Domo Card.""" @@ -221,72 +223,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoCard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.domo_dataset is UNSET: - errors.append("domo_dataset is required for creation") - if errors: - raise ValueError(f"DomoCard validation failed: {errors}") - - def minimize(self) -> "DomoCard": - """ - Return a minimal copy of this DomoCard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoCard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoCard instance with only the minimum required fields. - """ - self.validate() - return DomoCard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoCard": - """ - Create a :class:`RelatedDomoCard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoCard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoCard(guid=self.guid) - return RelatedDomoCard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -566,9 +502,6 @@ def _domo_card_to_nested(domo_card: DomoCard) -> DomoCardNested: is_incomplete=domo_card.is_incomplete, provenance_type=domo_card.provenance_type, home_id=domo_card.home_id, - depth=domo_card.depth, - immediate_upstream=domo_card.immediate_upstream, - immediate_downstream=domo_card.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -600,6 +533,7 @@ def _domo_card_from_nested(nested: DomoCardNested) -> DomoCard: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -608,9 +542,6 @@ def _domo_card_from_nested(nested: DomoCardNested) -> DomoCard: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_card_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/domo_dashboard.py b/pyatlan_v9/model/assets/domo_dashboard.py index af6b9ea9e..ba9fa2385 100644 --- a/pyatlan_v9/model/assets/domo_dashboard.py +++ b/pyatlan_v9/model/assets/domo_dashboard.py @@ -98,6 +98,8 @@ class DomoDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoDashboard" + domo_dashboard_card_count: Union[int, None, UnsetType] = UNSET 
"""Number of cards linked to this dashboard.""" @@ -217,70 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"DomoDashboard validation failed: {errors}") - - def minimize(self) -> "DomoDashboard": - """ - Return a minimal copy of this DomoDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDashboard instance with only the minimum required fields. 
- """ - self.validate() - return DomoDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDashboard": - """ - Create a :class:`RelatedDomoDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDomoDashboard(guid=self.guid) - return RelatedDomoDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -558,9 +496,6 @@ def _domo_dashboard_to_nested(domo_dashboard: DomoDashboard) -> DomoDashboardNes is_incomplete=domo_dashboard.is_incomplete, provenance_type=domo_dashboard.provenance_type, home_id=domo_dashboard.home_id, - depth=domo_dashboard.depth, - immediate_upstream=domo_dashboard.immediate_upstream, - immediate_downstream=domo_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -594,6 +529,7 @@ def _domo_dashboard_from_nested(nested: DomoDashboardNested) -> DomoDashboard: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -602,9 +538,6 @@ def _domo_dashboard_from_nested(nested: DomoDashboardNested) -> DomoDashboard: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dashboard_attrs(attrs), # 
Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/domo_dataset.py b/pyatlan_v9/model/assets/domo_dataset.py index 7ff2ca534..1702deb82 100644 --- a/pyatlan_v9/model/assets/domo_dataset.py +++ b/pyatlan_v9/model/assets/domo_dataset.py @@ -39,7 +39,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomoCard, RelatedDomoDataset, RelatedDomoDatasetColumn +from .domo_related import RelatedDomoCard, RelatedDomoDatasetColumn from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -100,6 +100,8 @@ class DomoDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoDataset" + domo_dataset_row_count: Union[int, None, UnsetType] = UNSET """Number of rows in the Domo dataset.""" @@ -222,66 +224,6 @@ class DomoDataset(Asset): def __post_init__(self) -> None: self.type_name = "DomoDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DomoDataset validation failed: {errors}") - - def minimize(self) -> "DomoDataset": - """ - Return a minimal copy of this DomoDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDataset instance with only the minimum required fields. - """ - self.validate() - return DomoDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDataset": - """ - Create a :class:`RelatedDomoDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoDataset(guid=self.guid) - return RelatedDomoDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -573,9 +515,6 @@ def _domo_dataset_to_nested(domo_dataset: DomoDataset) -> DomoDatasetNested: is_incomplete=domo_dataset.is_incomplete, provenance_type=domo_dataset.provenance_type, home_id=domo_dataset.home_id, - depth=domo_dataset.depth, - immediate_upstream=domo_dataset.immediate_upstream, - immediate_downstream=domo_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -607,6 +546,7 @@ def _domo_dataset_from_nested(nested: DomoDatasetNested) -> DomoDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -615,9 +555,6 @@ def _domo_dataset_from_nested(nested: DomoDatasetNested) -> DomoDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/domo_dataset_column.py b/pyatlan_v9/model/assets/domo_dataset_column.py index c3c5f7edd..6bade78d8 100644 --- a/pyatlan_v9/model/assets/domo_dataset_column.py +++ b/pyatlan_v9/model/assets/domo_dataset_column.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import 
RelatedDomoDataset, RelatedDomoDatasetColumn +from .domo_related import RelatedDomoDataset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +99,8 @@ class DomoDatasetColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoDatasetColumn" + domo_dataset_column_type: Union[str, None, UnsetType] = UNSET """Type of Domo Dataset Column.""" @@ -221,74 +223,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.domo_dataset is UNSET: - errors.append("domo_dataset is required for creation") - if self.domo_dataset_qualified_name is UNSET: - errors.append("domo_dataset_qualified_name is required for creation") - if errors: - raise ValueError(f"DomoDatasetColumn validation failed: {errors}") - - def minimize(self) -> "DomoDatasetColumn": - """ - Return a minimal copy of this DomoDatasetColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDatasetColumn instance with only the minimum required fields. - """ - self.validate() - return DomoDatasetColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDatasetColumn": - """ - Create a :class:`RelatedDomoDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDatasetColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoDatasetColumn(guid=self.guid) - return RelatedDomoDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -581,9 +515,6 @@ def _domo_dataset_column_to_nested( is_incomplete=domo_dataset_column.is_incomplete, provenance_type=domo_dataset_column.provenance_type, home_id=domo_dataset_column.home_id, - depth=domo_dataset_column.depth, - immediate_upstream=domo_dataset_column.immediate_upstream, - immediate_downstream=domo_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -619,6 +550,7 @@ def _domo_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -627,9 +559,6 @@ def _domo_dataset_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio.py b/pyatlan_v9/model/assets/dremio.py index e5e68d9ca..ba0f34c0c 100644 --- a/pyatlan_v9/model/assets/dremio.py +++ b/pyatlan_v9/model/assets/dremio.py @@ -46,7 +46,6 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import RelatedDremio from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor @@ -132,6 +131,8 @@ class Dremio(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dremio" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -332,66 +333,6 @@ class Dremio(Asset): def __post_init__(self) -> None: self.type_name = "Dremio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dremio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dremio validation failed: {errors}") - - def minimize(self) -> "Dremio": - """ - Return a minimal copy of this Dremio with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dremio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Dremio instance with only the minimum required fields. 
- """ - self.validate() - return Dremio(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremio": - """ - Create a :class:`RelatedDremio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremio reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremio(guid=self.guid) - return RelatedDremio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -806,9 +747,6 @@ def _dremio_to_nested(dremio: Dremio) -> DremioNested: is_incomplete=dremio.is_incomplete, provenance_type=dremio.provenance_type, home_id=dremio.home_id, - depth=dremio.depth, - immediate_upstream=dremio.immediate_upstream, - immediate_downstream=dremio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -838,6 +776,7 @@ def _dremio_from_nested(nested: DremioNested) -> Dremio: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -846,9 +785,6 @@ def _dremio_from_nested(nested: DremioNested) -> Dremio: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_column.py b/pyatlan_v9/model/assets/dremio_column.py index 
f62382449..bf913f1f9 100644 --- a/pyatlan_v9/model/assets/dremio_column.py +++ b/pyatlan_v9/model/assets/dremio_column.py @@ -49,7 +49,6 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import RelatedDremioColumn from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -236,6 +235,8 @@ class DremioColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioColumn" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -710,69 +711,6 @@ class DremioColumn(Asset): def __post_init__(self) -> None: self.type_name = "DremioColumn" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"DremioColumn validation failed: {errors}") - - def minimize(self) -> "DremioColumn": - """ - Return a minimal copy of this DremioColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioColumn instance with only the minimum required fields. - """ - self.validate() - return DremioColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioColumn": - """ - Create a :class:`RelatedDremioColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioColumn(guid=self.guid) - return RelatedDremioColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1625,9 +1563,6 @@ def _dremio_column_to_nested(dremio_column: DremioColumn) -> DremioColumnNested: is_incomplete=dremio_column.is_incomplete, provenance_type=dremio_column.provenance_type, home_id=dremio_column.home_id, - depth=dremio_column.depth, - immediate_upstream=dremio_column.immediate_upstream, - immediate_downstream=dremio_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1661,6 +1596,7 @@ def _dremio_column_from_nested(nested: DremioColumnNested) -> DremioColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1669,9 +1605,6 @@ def _dremio_column_from_nested(nested: DremioColumnNested) -> DremioColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_folder.py b/pyatlan_v9/model/assets/dremio_folder.py index 7fa796f08..9c694d008 100644 --- a/pyatlan_v9/model/assets/dremio_folder.py +++ b/pyatlan_v9/model/assets/dremio_folder.py @@ -146,6 +146,8 @@ class DremioFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = 
"DremioFolder" + dremio_parent_asset_type: Union[str, None, UnsetType] = UNSET """Type of top level asset that contains this folder.""" @@ -377,76 +379,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_source is UNSET: - errors.append("dremio_source is required for creation") - if self.dremio_source_name is UNSET: - errors.append("dremio_source_name is required for creation") - if self.dremio_source_qualified_name is UNSET: - errors.append("dremio_source_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioFolder validation failed: {errors}") - - def minimize(self) -> "DremioFolder": - """ - Return a minimal copy 
of this DremioFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioFolder instance with only the minimum required fields. - """ - self.validate() - return DremioFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioFolder": - """ - Create a :class:`RelatedDremioFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremioFolder(guid=self.guid) - return RelatedDremioFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -898,9 +830,6 @@ def _dremio_folder_to_nested(dremio_folder: DremioFolder) -> DremioFolderNested: is_incomplete=dremio_folder.is_incomplete, provenance_type=dremio_folder.provenance_type, home_id=dremio_folder.home_id, - depth=dremio_folder.depth, - immediate_upstream=dremio_folder.immediate_upstream, - immediate_downstream=dremio_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -934,6 +863,7 @@ def _dremio_folder_from_nested(nested: DremioFolderNested) -> DremioFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -942,9 +872,6 @@ def 
_dremio_folder_from_nested(nested: DremioFolderNested) -> DremioFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_folder_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_physical_dataset.py b/pyatlan_v9/model/assets/dremio_physical_dataset.py index 3f1bdadb4..e0885c110 100644 --- a/pyatlan_v9/model/assets/dremio_physical_dataset.py +++ b/pyatlan_v9/model/assets/dremio_physical_dataset.py @@ -47,11 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioPhysicalDataset, - RelatedDremioSource, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioSource from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -179,6 +175,8 @@ class DremioPhysicalDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioPhysicalDataset" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -490,76 +488,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioPhysicalDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_source is UNSET: - errors.append("dremio_source is required for creation") - if self.dremio_source_name is UNSET: - errors.append("dremio_source_name is required for creation") - if self.dremio_source_qualified_name is UNSET: - errors.append("dremio_source_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioPhysicalDataset validation failed: {errors}") - - def minimize(self) -> "DremioPhysicalDataset": - """ - Return a minimal copy of this DremioPhysicalDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioPhysicalDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioPhysicalDataset instance with only the minimum required fields. - """ - self.validate() - return DremioPhysicalDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioPhysicalDataset": - """ - Create a :class:`RelatedDremioPhysicalDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioPhysicalDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremioPhysicalDataset(guid=self.guid) - return RelatedDremioPhysicalDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1153,9 +1081,6 @@ def _dremio_physical_dataset_to_nested( is_incomplete=dremio_physical_dataset.is_incomplete, provenance_type=dremio_physical_dataset.provenance_type, home_id=dremio_physical_dataset.home_id, - depth=dremio_physical_dataset.depth, - immediate_upstream=dremio_physical_dataset.immediate_upstream, - immediate_downstream=dremio_physical_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1191,6 +1116,7 @@ def _dremio_physical_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1199,9 +1125,6 @@ def _dremio_physical_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_physical_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_source.py b/pyatlan_v9/model/assets/dremio_source.py index fc64251e1..c324aa444 100644 --- a/pyatlan_v9/model/assets/dremio_source.py +++ b/pyatlan_v9/model/assets/dremio_source.py @@ -46,11 +46,7 @@ RelatedDbtSource, RelatedDbtTest, ) 
-from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioPhysicalDataset, - RelatedDremioSource, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioPhysicalDataset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -144,6 +140,8 @@ class DremioSource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioSource" + dremio_source_type: Union[str, None, UnsetType] = UNSET """Type of external source.""" @@ -370,66 +368,6 @@ class DremioSource(Asset): def __post_init__(self) -> None: self.type_name = "DremioSource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioSource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DremioSource validation failed: {errors}") - - def minimize(self) -> "DremioSource": - """ - Return a minimal copy of this DremioSource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioSource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioSource instance with only the minimum required fields. - """ - self.validate() - return DremioSource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioSource": - """ - Create a :class:`RelatedDremioSource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioSource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioSource(guid=self.guid) - return RelatedDremioSource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -892,9 +830,6 @@ def _dremio_source_to_nested(dremio_source: DremioSource) -> DremioSourceNested: is_incomplete=dremio_source.is_incomplete, provenance_type=dremio_source.provenance_type, home_id=dremio_source.home_id, - depth=dremio_source.depth, - immediate_upstream=dremio_source.immediate_upstream, - immediate_downstream=dremio_source.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -928,6 +863,7 @@ def _dremio_source_from_nested(nested: DremioSourceNested) -> DremioSource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -936,9 +872,6 @@ def _dremio_source_from_nested(nested: DremioSourceNested) -> DremioSource: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_source_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_space.py b/pyatlan_v9/model/assets/dremio_space.py index adeaea0c1..e92723b14 100644 --- a/pyatlan_v9/model/assets/dremio_space.py +++ b/pyatlan_v9/model/assets/dremio_space.py @@ -46,11 +46,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioSpace, - RelatedDremioVirtualDataset, -) +from .dremio_related 
import RelatedDremioFolder, RelatedDremioVirtualDataset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -138,6 +134,8 @@ class DremioSpace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioSpace" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -346,66 +344,6 @@ class DremioSpace(Asset): def __post_init__(self) -> None: self.type_name = "DremioSpace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioSpace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DremioSpace validation failed: {errors}") - - def minimize(self) -> "DremioSpace": - """ - Return a minimal copy of this DremioSpace with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioSpace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioSpace instance with only the minimum required fields. - """ - self.validate() - return DremioSpace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioSpace": - """ - Create a :class:`RelatedDremioSpace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioSpace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremioSpace(guid=self.guid) - return RelatedDremioSpace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -832,9 +770,6 @@ def _dremio_space_to_nested(dremio_space: DremioSpace) -> DremioSpaceNested: is_incomplete=dremio_space.is_incomplete, provenance_type=dremio_space.provenance_type, home_id=dremio_space.home_id, - depth=dremio_space.depth, - immediate_upstream=dremio_space.immediate_upstream, - immediate_downstream=dremio_space.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -866,6 +801,7 @@ def _dremio_space_from_nested(nested: DremioSpaceNested) -> DremioSpace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -874,9 +810,6 @@ def _dremio_space_from_nested(nested: DremioSpaceNested) -> DremioSpace: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_space_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dremio_virtual_dataset.py b/pyatlan_v9/model/assets/dremio_virtual_dataset.py index 46bbd7302..807c7588c 100644 --- a/pyatlan_v9/model/assets/dremio_virtual_dataset.py +++ b/pyatlan_v9/model/assets/dremio_virtual_dataset.py @@ -47,11 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioSpace, - RelatedDremioVirtualDataset, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioSpace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -151,6 +147,8 @@ class DremioVirtualDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioVirtualDataset" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -396,76 +394,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioVirtualDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_space is UNSET: - errors.append("dremio_space is required for creation") - if self.dremio_space_name is UNSET: - errors.append("dremio_space_name is required for creation") - if self.dremio_space_qualified_name is UNSET: - errors.append("dremio_space_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioVirtualDataset validation failed: {errors}") - - def minimize(self) -> "DremioVirtualDataset": - """ - Return a minimal copy of this DremioVirtualDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioVirtualDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioVirtualDataset instance with only the minimum required fields. - """ - self.validate() - return DremioVirtualDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioVirtualDataset": - """ - Create a :class:`RelatedDremioVirtualDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedDremioVirtualDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremioVirtualDataset(guid=self.guid) - return RelatedDremioVirtualDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -952,9 +880,6 @@ def _dremio_virtual_dataset_to_nested( is_incomplete=dremio_virtual_dataset.is_incomplete, provenance_type=dremio_virtual_dataset.provenance_type, home_id=dremio_virtual_dataset.home_id, - depth=dremio_virtual_dataset.depth, - immediate_upstream=dremio_virtual_dataset.immediate_upstream, - immediate_downstream=dremio_virtual_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -990,6 +915,7 @@ def _dremio_virtual_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -998,9 +924,6 @@ def _dremio_virtual_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_virtual_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dynamo_db.py b/pyatlan_v9/model/assets/dynamo_db.py index 8955e6a1a..f6e91bd05 100644 --- a/pyatlan_v9/model/assets/dynamo_db.py +++ b/pyatlan_v9/model/assets/dynamo_db.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from 
.dynamo_db_related import RelatedDynamoDB from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +97,8 @@ class DynamoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DynamoDB" + dynamo_db_status: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="dynamoDBStatus" ) @@ -223,66 +224,6 @@ class DynamoDB(Asset): def __post_init__(self) -> None: self.type_name = "DynamoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDB validation failed: {errors}") - - def minimize(self) -> "DynamoDB": - """ - Return a minimal copy of this DynamoDB with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDB instance with only the minimum required fields. - """ - self.validate() - return DynamoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDB": - """ - Create a :class:`RelatedDynamoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDB reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDynamoDB(guid=self.guid) - return RelatedDynamoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -571,9 +512,6 @@ def _dynamo_db_to_nested(dynamo_db: DynamoDB) -> DynamoDBNested: is_incomplete=dynamo_db.is_incomplete, provenance_type=dynamo_db.provenance_type, home_id=dynamo_db.home_id, - depth=dynamo_db.depth, - immediate_upstream=dynamo_db.immediate_upstream, - immediate_downstream=dynamo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -605,6 +543,7 @@ def _dynamo_db_from_nested(nested: DynamoDBNested) -> DynamoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -613,9 +552,6 @@ def _dynamo_db_from_nested(nested: DynamoDBNested) -> DynamoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dynamo_db_attribute.py b/pyatlan_v9/model/assets/dynamo_db_attribute.py index d5999f647..14753b987 100644 --- a/pyatlan_v9/model/assets/dynamo_db_attribute.py +++ b/pyatlan_v9/model/assets/dynamo_db_attribute.py @@ -50,7 +50,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dynamo_db_related import RelatedDynamoDBAttribute, RelatedDynamoDBTable +from .dynamo_db_related import RelatedDynamoDBTable from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -236,6 +236,8 @@ class DynamoDBAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DynamoDBAttribute" + dynamo_db_status: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="dynamoDBStatus" ) @@ -727,78 +729,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"DynamoDBAttribute validation failed: {errors}") - - def minimize(self) -> "DynamoDBAttribute": - """ - Return a minimal copy of this DynamoDBAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBAttribute instance with only the minimum required fields. - """ - self.validate() - return DynamoDBAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDBAttribute": - """ - Create a :class:`RelatedDynamoDBAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBAttribute reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDynamoDBAttribute(guid=self.guid) - return RelatedDynamoDBAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1663,9 +1593,6 @@ def _dynamo_db_attribute_to_nested( is_incomplete=dynamo_db_attribute.is_incomplete, provenance_type=dynamo_db_attribute.provenance_type, home_id=dynamo_db_attribute.home_id, - depth=dynamo_db_attribute.depth, - immediate_upstream=dynamo_db_attribute.immediate_upstream, - immediate_downstream=dynamo_db_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1701,6 +1628,7 @@ def _dynamo_db_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1709,9 +1637,6 @@ def _dynamo_db_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/dynamo_db_related.py b/pyatlan_v9/model/assets/dynamo_db_related.py index 1fffc76a1..cd953bb54 100644 --- a/pyatlan_v9/model/assets/dynamo_db_related.py +++ b/pyatlan_v9/model/assets/dynamo_db_related.py @@ -94,13 +94,13 @@ class RelatedDynamoDBTable(RelatedDynamoDB): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DynamoDBTable" so it serializes correctly - dynamo_db_table_gsi_count: Union[int, 
None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -119,8 +119,8 @@ class RelatedDynamoDBSecondaryIndex(RelatedDynamoDB): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DynamoDBSecondaryIndex" so it serializes correctly - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" diff --git a/pyatlan_v9/model/assets/dynamo_db_secondary_index.py b/pyatlan_v9/model/assets/dynamo_db_secondary_index.py index 98394e903..3923fbfff 100644 --- a/pyatlan_v9/model/assets/dynamo_db_secondary_index.py +++ b/pyatlan_v9/model/assets/dynamo_db_secondary_index.py @@ -46,7 +46,6 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dynamo_db_related import RelatedDynamoDBSecondaryIndex from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -77,7 +76,7 @@ class DynamoDBSecondaryIndex(Asset): Represents a DynamoDB secondary index asset in Atlan. 
""" - DYNAMO_DB_SECONDARY_INDEX_PROJECTION_TYPE: ClassVar[Any] = None + DYNAMO_DB_PROJECTION_TYPE: ClassVar[Any] = None DYNAMO_DB_STATUS: ClassVar[Any] = None DYNAMO_DB_PARTITION_KEY: ClassVar[Any] = None DYNAMO_DB_SORT_KEY: ClassVar[Any] = None @@ -171,8 +170,10 @@ class DynamoDBSecondaryIndex(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + type_name: Union[str, UnsetType] = "DynamoDBSecondaryIndex" + + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" @@ -481,68 +482,6 @@ class DynamoDBSecondaryIndex(Asset): def __post_init__(self) -> None: self.type_name = "DynamoDBSecondaryIndex" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBSecondaryIndex instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDBSecondaryIndex validation failed: {errors}") - - def minimize(self) -> "DynamoDBSecondaryIndex": - """ - Return a minimal copy of this DynamoDBSecondaryIndex with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBSecondaryIndex with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBSecondaryIndex instance with only the minimum required fields. - """ - self.validate() - return DynamoDBSecondaryIndex( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDynamoDBSecondaryIndex": - """ - Create a :class:`RelatedDynamoDBSecondaryIndex` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBSecondaryIndex reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDynamoDBSecondaryIndex(guid=self.guid) - return RelatedDynamoDBSecondaryIndex(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -600,8 +539,8 @@ def from_json( class DynamoDBSecondaryIndexAttributes(AssetAttributes): """DynamoDBSecondaryIndex-specific attributes for nested API format.""" - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" @@ -982,9 +921,7 @@ def _populate_dynamo_db_secondary_index_attrs( ) -> None: """Populate DynamoDBSecondaryIndex-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.dynamo_db_secondary_index_projection_type = ( - obj.dynamo_db_secondary_index_projection_type - ) + attrs.dynamo_db_projection_type = obj.dynamo_db_projection_type attrs.dynamo_db_status = obj.dynamo_db_status attrs.dynamo_db_partition_key = obj.dynamo_db_partition_key attrs.dynamo_db_sort_key = obj.dynamo_db_sort_key @@ -1043,9 +980,7 @@ def _extract_dynamo_db_secondary_index_attrs( ) -> dict: """Extract all DynamoDBSecondaryIndex attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["dynamo_db_secondary_index_projection_type"] = ( - attrs.dynamo_db_secondary_index_projection_type - ) + result["dynamo_db_projection_type"] = attrs.dynamo_db_projection_type result["dynamo_db_status"] = attrs.dynamo_db_status result["dynamo_db_partition_key"] = attrs.dynamo_db_partition_key result["dynamo_db_sort_key"] = attrs.dynamo_db_sort_key @@ 
-1139,9 +1074,6 @@ def _dynamo_db_secondary_index_to_nested( is_incomplete=dynamo_db_secondary_index.is_incomplete, provenance_type=dynamo_db_secondary_index.provenance_type, home_id=dynamo_db_secondary_index.home_id, - depth=dynamo_db_secondary_index.depth, - immediate_upstream=dynamo_db_secondary_index.immediate_upstream, - immediate_downstream=dynamo_db_secondary_index.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1177,6 +1109,7 @@ def _dynamo_db_secondary_index_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1185,9 +1118,6 @@ def _dynamo_db_secondary_index_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_secondary_index_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1219,8 +1149,8 @@ def _dynamo_db_secondary_index_from_nested_bytes( RelationField, ) -DynamoDBSecondaryIndex.DYNAMO_DB_SECONDARY_INDEX_PROJECTION_TYPE = KeywordField( - "dynamoDBSecondaryIndexProjectionType", "dynamoDBSecondaryIndexProjectionType" +DynamoDBSecondaryIndex.DYNAMO_DB_PROJECTION_TYPE = KeywordField( + "dynamoDBProjectionType", "dynamoDBProjectionType" ) DynamoDBSecondaryIndex.DYNAMO_DB_STATUS = KeywordField( "dynamoDBStatus", "dynamoDBStatus" diff --git a/pyatlan_v9/model/assets/dynamo_db_table.py b/pyatlan_v9/model/assets/dynamo_db_table.py index da37abf07..1b2b0ff53 100644 --- a/pyatlan_v9/model/assets/dynamo_db_table.py +++ b/pyatlan_v9/model/assets/dynamo_db_table.py @@ -50,7 +50,6 @@ RelatedDynamoDBAttribute, RelatedDynamoDBGlobalSecondaryIndex, 
RelatedDynamoDBLocalSecondaryIndex, - RelatedDynamoDBTable, ) from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -82,8 +81,8 @@ class DynamoDBTable(Asset): Represents a DynamoDB table asset in Atlan. """ - DYNAMO_DB_TABLE_GSI_COUNT: ClassVar[Any] = None - DYNAMO_DB_TABLE_LSI_COUNT: ClassVar[Any] = None + DYNAMO_DBGSI_COUNT: ClassVar[Any] = None + DYNAMO_DBLSI_COUNT: ClassVar[Any] = None DYNAMO_DB_STATUS: ClassVar[Any] = None DYNAMO_DB_PARTITION_KEY: ClassVar[Any] = None DYNAMO_DB_SORT_KEY: ClassVar[Any] = None @@ -180,13 +179,15 @@ class DynamoDBTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - dynamo_db_table_gsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + type_name: Union[str, UnsetType] = "DynamoDBTable" + + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -510,66 +511,6 @@ class DynamoDBTable(Asset): def __post_init__(self) -> None: self.type_name = "DynamoDBTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDBTable validation failed: {errors}") - - def minimize(self) -> "DynamoDBTable": - """ - Return a minimal copy of this DynamoDBTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBTable instance with only the minimum required fields. - """ - self.validate() - return DynamoDBTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDBTable": - """ - Create a :class:`RelatedDynamoDBTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDynamoDBTable(guid=self.guid) - return RelatedDynamoDBTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -625,13 +566,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> DynamoDBTab class DynamoDBTableAttributes(AssetAttributes): """DynamoDBTable-specific attributes for nested API format.""" - dynamo_db_table_gsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -1030,8 +971,8 @@ def _populate_dynamo_db_table_attrs( ) -> None: """Populate DynamoDBTable-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.dynamo_db_table_gsi_count = obj.dynamo_db_table_gsi_count - attrs.dynamo_db_table_lsi_count = obj.dynamo_db_table_lsi_count + attrs.dynamo_dbgsi_count = obj.dynamo_dbgsi_count + attrs.dynamo_dblsi_count = obj.dynamo_dblsi_count attrs.dynamo_db_status = obj.dynamo_db_status attrs.dynamo_db_partition_key = obj.dynamo_db_partition_key attrs.dynamo_db_sort_key = obj.dynamo_db_sort_key @@ -1088,8 +1029,8 @@ def _populate_dynamo_db_table_attrs( def _extract_dynamo_db_table_attrs(attrs: DynamoDBTableAttributes) -> dict: """Extract all DynamoDBTable attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - 
result["dynamo_db_table_gsi_count"] = attrs.dynamo_db_table_gsi_count - result["dynamo_db_table_lsi_count"] = attrs.dynamo_db_table_lsi_count + result["dynamo_dbgsi_count"] = attrs.dynamo_dbgsi_count + result["dynamo_dblsi_count"] = attrs.dynamo_dblsi_count result["dynamo_db_status"] = attrs.dynamo_db_status result["dynamo_db_partition_key"] = attrs.dynamo_db_partition_key result["dynamo_db_sort_key"] = attrs.dynamo_db_sort_key @@ -1181,9 +1122,6 @@ def _dynamo_db_table_to_nested(dynamo_db_table: DynamoDBTable) -> DynamoDBTableN is_incomplete=dynamo_db_table.is_incomplete, provenance_type=dynamo_db_table.provenance_type, home_id=dynamo_db_table.home_id, - depth=dynamo_db_table.depth, - immediate_upstream=dynamo_db_table.immediate_upstream, - immediate_downstream=dynamo_db_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1217,6 +1155,7 @@ def _dynamo_db_table_from_nested(nested: DynamoDBTableNested) -> DynamoDBTable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1225,9 +1164,6 @@ def _dynamo_db_table_from_nested(nested: DynamoDBTableNested) -> DynamoDBTable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1257,12 +1193,8 @@ def _dynamo_db_table_from_nested_bytes(data: bytes, serde: Serde) -> DynamoDBTab RelationField, ) -DynamoDBTable.DYNAMO_DB_TABLE_GSI_COUNT = NumericField( - "dynamoDBTableGSICount", "dynamoDBTableGSICount" -) -DynamoDBTable.DYNAMO_DB_TABLE_LSI_COUNT = NumericField( - 
"dynamoDBTableLSICount", "dynamoDBTableLSICount" -) +DynamoDBTable.DYNAMO_DBGSI_COUNT = NumericField("dynamoDBGSICount", "dynamoDBGSICount") +DynamoDBTable.DYNAMO_DBLSI_COUNT = NumericField("dynamoDBLSICount", "dynamoDBLSICount") DynamoDBTable.DYNAMO_DB_STATUS = KeywordField("dynamoDBStatus", "dynamoDBStatus") DynamoDBTable.DYNAMO_DB_PARTITION_KEY = KeywordField( "dynamoDBPartitionKey", "dynamoDBPartitionKey" diff --git a/pyatlan_v9/model/assets/entity.py b/pyatlan_v9/model/assets/entity.py index d7c5ba58f..7427d75c4 100644 --- a/pyatlan_v9/model/assets/entity.py +++ b/pyatlan_v9/model/assets/entity.py @@ -18,8 +18,6 @@ import msgspec from msgspec import UNSET, UnsetType -from .related_entity import SaveSemantic - class AtlasClassification( msgspec.Struct, kw_only=True, omit_defaults=True, rename="camel" @@ -175,18 +173,3 @@ class Entity(msgspec.Struct, kw_only=True, omit_defaults=True, rename="camel"): home_id: Union[str, UnsetType] = UNSET """Home identifier for distributed Atlas systems.""" - - # Lineage-specific fields (only populated in lineage API responses) - depth: Union[int, None, UnsetType] = UNSET - """Depth of this asset within lineage. Only available in assets retrieved via lineage.""" - - immediate_upstream: Union[List[Any], None, UnsetType] = UNSET - """Assets immediately upstream of this asset within lineage.""" - - immediate_downstream: Union[List[Any], None, UnsetType] = UNSET - """Assets immediately downstream of this asset within lineage.""" - - # Internal SDK fields (not sent to API) - semantic: Union[SaveSemantic, None, UnsetType] = UNSET - """Save semantic for relationship operations (REPLACE, APPEND, REMOVE). 
- Not serialized to JSON - used internally by ref_by_guid/ref_by_qualified_name.""" diff --git a/pyatlan_v9/model/assets/event_store.py b/pyatlan_v9/model/assets/event_store.py index 6aa699bf7..7e0030f3c 100644 --- a/pyatlan_v9/model/assets/event_store.py +++ b/pyatlan_v9/model/assets/event_store.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedEventStore from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class EventStore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "EventStore" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class EventStore(Asset): def __post_init__(self) -> None: self.type_name = "EventStore" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this EventStore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"EventStore validation failed: {errors}") - - def minimize(self) -> "EventStore": - """ - Return a minimal copy of this EventStore with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new EventStore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new EventStore instance with only the minimum required fields. - """ - self.validate() - return EventStore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedEventStore": - """ - Create a :class:`RelatedEventStore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedEventStore reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedEventStore(guid=self.guid) - return RelatedEventStore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _event_store_to_nested(event_store: EventStore) -> EventStoreNested: is_incomplete=event_store.is_incomplete, provenance_type=event_store.provenance_type, home_id=event_store.home_id, - depth=event_store.depth, - immediate_upstream=event_store.immediate_upstream, - immediate_downstream=event_store.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -527,6 +465,7 @@ def _event_store_from_nested(nested: EventStoreNested) -> EventStore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -535,9 +474,6 @@ def _event_store_from_nested(nested: EventStoreNested) -> EventStore: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_event_store_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric.py b/pyatlan_v9/model/assets/fabric.py index 44d139b86..df5b74b27 100644 --- a/pyatlan_v9/model/assets/fabric.py +++ b/pyatlan_v9/model/assets/fabric.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabric from .gtc_related import RelatedAtlasGlossaryTerm from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +93,8 @@ class Fabric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Fabric" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -198,66 +199,6 @@ class Fabric(Asset): def __post_init__(self) -> None: self.type_name = "Fabric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Fabric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Fabric validation failed: {errors}") - - def minimize(self) -> "Fabric": - """ - Return a minimal copy of this Fabric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Fabric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Fabric instance with only the minimum required fields. - """ - self.validate() - return Fabric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabric": - """ - Create a :class:`RelatedFabric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabric(guid=self.guid) - return RelatedFabric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -519,9 +460,6 @@ def _fabric_to_nested(fabric: Fabric) -> FabricNested: is_incomplete=fabric.is_incomplete, provenance_type=fabric.provenance_type, home_id=fabric.home_id, - depth=fabric.depth, - immediate_upstream=fabric.immediate_upstream, - immediate_downstream=fabric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -551,6 +489,7 @@ def _fabric_from_nested(nested: FabricNested) -> Fabric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -559,9 +498,6 @@ def _fabric_from_nested(nested: FabricNested) -> Fabric: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/fabric_activity.py b/pyatlan_v9/model/assets/fabric_activity.py index 16e96abec..42f739a72 100644 --- a/pyatlan_v9/model/assets/fabric_activity.py +++ b/pyatlan_v9/model/assets/fabric_activity.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricActivity, RelatedFabricDataPipeline +from .fabric_related import RelatedFabricDataPipeline from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +99,8 @@ class FabricActivity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricActivity" + fabric_data_pipeline_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric data pipeline that contains this asset.""" @@ -223,76 +225,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricActivity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_data_pipeline is UNSET: - errors.append("fabric_data_pipeline is required for creation") - if self.fabric_data_pipeline_qualified_name is UNSET: - errors.append( - "fabric_data_pipeline_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FabricActivity validation failed: {errors}") - - def minimize(self) -> "FabricActivity": - """ - Return a minimal copy of this FabricActivity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricActivity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricActivity instance with only the minimum required fields. - """ - self.validate() - return FabricActivity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricActivity": - """ - Create a :class:`RelatedFabricActivity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricActivity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricActivity(guid=self.guid) - return RelatedFabricActivity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -580,9 +512,6 @@ def _fabric_activity_to_nested(fabric_activity: FabricActivity) -> FabricActivit is_incomplete=fabric_activity.is_incomplete, provenance_type=fabric_activity.provenance_type, home_id=fabric_activity.home_id, - depth=fabric_activity.depth, - immediate_upstream=fabric_activity.immediate_upstream, - immediate_downstream=fabric_activity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -616,6 +545,7 @@ def _fabric_activity_from_nested(nested: FabricActivityNested) -> FabricActivity updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -624,9 +554,6 @@ def _fabric_activity_from_nested(nested: FabricActivityNested) -> FabricActivity is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_activity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_dashboard.py b/pyatlan_v9/model/assets/fabric_dashboard.py index 596b6b95d..28a7d0d7c 100644 --- a/pyatlan_v9/model/assets/fabric_dashboard.py +++ b/pyatlan_v9/model/assets/fabric_dashboard.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric 
-from .fabric_related import RelatedFabricDashboard, RelatedFabricWorkspace +from .fabric_related import RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +96,8 @@ class FabricDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDashboard" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -209,72 +211,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDashboard validation failed: {errors}") - - def minimize(self) -> "FabricDashboard": - """ - Return a minimal copy of this FabricDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDashboard instance with only the minimum required fields. - """ - self.validate() - return FabricDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDashboard": - """ - Create a :class:`RelatedFabricDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDashboard(guid=self.guid) - return RelatedFabricDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -550,9 +486,6 @@ def _fabric_dashboard_to_nested( is_incomplete=fabric_dashboard.is_incomplete, provenance_type=fabric_dashboard.provenance_type, home_id=fabric_dashboard.home_id, - depth=fabric_dashboard.depth, - immediate_upstream=fabric_dashboard.immediate_upstream, - immediate_downstream=fabric_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -586,6 +519,7 @@ def _fabric_dashboard_from_nested(nested: FabricDashboardNested) -> FabricDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -594,9 +528,6 @@ def _fabric_dashboard_from_nested(nested: FabricDashboardNested) -> FabricDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_data_pipeline.py b/pyatlan_v9/model/assets/fabric_data_pipeline.py index dd90cd794..4ff510823 100644 --- a/pyatlan_v9/model/assets/fabric_data_pipeline.py +++ b/pyatlan_v9/model/assets/fabric_data_pipeline.py @@ -40,11 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related 
import ( - RelatedFabricActivity, - RelatedFabricDataPipeline, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricActivity, RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +97,8 @@ class FabricDataPipeline(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataPipeline" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -217,72 +215,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataPipeline instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDataPipeline validation failed: {errors}") - - def minimize(self) -> "FabricDataPipeline": - """ - Return a minimal copy of this FabricDataPipeline with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataPipeline with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataPipeline instance with only the minimum required fields. - """ - self.validate() - return FabricDataPipeline(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDataPipeline": - """ - Create a :class:`RelatedFabricDataPipeline` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDataPipeline reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDataPipeline(guid=self.guid) - return RelatedFabricDataPipeline(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -562,9 +494,6 @@ def _fabric_data_pipeline_to_nested( is_incomplete=fabric_data_pipeline.is_incomplete, provenance_type=fabric_data_pipeline.provenance_type, home_id=fabric_data_pipeline.home_id, - depth=fabric_data_pipeline.depth, - immediate_upstream=fabric_data_pipeline.immediate_upstream, - immediate_downstream=fabric_data_pipeline.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -600,6 +529,7 @@ def _fabric_data_pipeline_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -608,9 +538,6 @@ def _fabric_data_pipeline_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_data_pipeline_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_dataflow.py b/pyatlan_v9/model/assets/fabric_dataflow.py index c20a0d762..955559911 100644 --- a/pyatlan_v9/model/assets/fabric_dataflow.py +++ b/pyatlan_v9/model/assets/fabric_dataflow.py @@ -40,11 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricDataflow, - 
RelatedFabricDataflowEntityColumn, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricDataflowEntityColumn, RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +97,8 @@ class FabricDataflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataflow" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -219,72 +217,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDataflow validation failed: {errors}") - - def minimize(self) -> "FabricDataflow": - """ - Return a minimal copy of this FabricDataflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataflow instance with only the minimum required fields. - """ - self.validate() - return FabricDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDataflow": - """ - Create a :class:`RelatedFabricDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDataflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDataflow(guid=self.guid) - return RelatedFabricDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -562,9 +494,6 @@ def _fabric_dataflow_to_nested(fabric_dataflow: FabricDataflow) -> FabricDataflo is_incomplete=fabric_dataflow.is_incomplete, provenance_type=fabric_dataflow.provenance_type, home_id=fabric_dataflow.home_id, - depth=fabric_dataflow.depth, - immediate_upstream=fabric_dataflow.immediate_upstream, - immediate_downstream=fabric_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -598,6 +527,7 @@ def _fabric_dataflow_from_nested(nested: FabricDataflowNested) -> FabricDataflow updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -606,9 +536,6 @@ def _fabric_dataflow_from_nested(nested: FabricDataflowNested) -> FabricDataflow is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py b/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py index 005203cff..d29063b5e 100644 --- a/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py +++ b/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from 
.data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricDataflow, RelatedFabricDataflowEntityColumn +from .fabric_related import RelatedFabricDataflow from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +98,8 @@ class FabricDataflowEntityColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataflowEntityColumn" + fabric_dataflow_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric dataflow that contains this asset.""" @@ -219,78 +221,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataflowEntityColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_dataflow is UNSET: - errors.append("fabric_dataflow is required for creation") - if self.fabric_dataflow_name is UNSET: - errors.append("fabric_dataflow_name is required for creation") - if self.fabric_dataflow_qualified_name is UNSET: - errors.append("fabric_dataflow_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricDataflowEntityColumn validation failed: {errors}") - - def minimize(self) -> "FabricDataflowEntityColumn": - """ - Return a minimal copy of this FabricDataflowEntityColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataflowEntityColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataflowEntityColumn instance with only the minimum required fields. - """ - self.validate() - return FabricDataflowEntityColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricDataflowEntityColumn": - """ - Create a :class:`RelatedFabricDataflowEntityColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedFabricDataflowEntityColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabricDataflowEntityColumn(guid=self.guid) - return RelatedFabricDataflowEntityColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -578,9 +508,6 @@ def _fabric_dataflow_entity_column_to_nested( is_incomplete=fabric_dataflow_entity_column.is_incomplete, provenance_type=fabric_dataflow_entity_column.provenance_type, home_id=fabric_dataflow_entity_column.home_id, - depth=fabric_dataflow_entity_column.depth, - immediate_upstream=fabric_dataflow_entity_column.immediate_upstream, - immediate_downstream=fabric_dataflow_entity_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -616,6 +543,7 @@ def _fabric_dataflow_entity_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -624,9 +552,6 @@ def _fabric_dataflow_entity_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dataflow_entity_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_page.py b/pyatlan_v9/model/assets/fabric_page.py index 10b7b8f09..ae15c45a6 100644 --- a/pyatlan_v9/model/assets/fabric_page.py +++ b/pyatlan_v9/model/assets/fabric_page.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import 
RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricPage, RelatedFabricReport, RelatedFabricVisual +from .fabric_related import RelatedFabricReport, RelatedFabricVisual from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +98,8 @@ class FabricPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricPage" + fabric_report_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric report that contains this asset.""" @@ -219,74 +221,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_report is UNSET: - errors.append("fabric_report is required for creation") - if self.fabric_report_qualified_name is UNSET: - errors.append("fabric_report_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricPage validation failed: {errors}") - - def minimize(self) -> "FabricPage": - """ - Return a minimal copy of this FabricPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricPage instance with only the minimum required fields. - """ - self.validate() - return FabricPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricPage": - """ - Create a :class:`RelatedFabricPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricPage(guid=self.guid) - return RelatedFabricPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -561,9 +495,6 @@ def _fabric_page_to_nested(fabric_page: FabricPage) -> FabricPageNested: is_incomplete=fabric_page.is_incomplete, provenance_type=fabric_page.provenance_type, home_id=fabric_page.home_id, - depth=fabric_page.depth, - immediate_upstream=fabric_page.immediate_upstream, - immediate_downstream=fabric_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -595,6 +526,7 @@ def _fabric_page_from_nested(nested: FabricPageNested) -> FabricPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -603,9 +535,6 @@ def _fabric_page_from_nested(nested: FabricPageNested) -> FabricPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_page_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_report.py b/pyatlan_v9/model/assets/fabric_report.py index 7c506b6fa..9c4478f7d 100644 --- a/pyatlan_v9/model/assets/fabric_report.py +++ b/pyatlan_v9/model/assets/fabric_report.py @@ -40,11 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricPage, - RelatedFabricReport, - 
RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricPage, RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +97,8 @@ class FabricReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricReport" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -217,72 +215,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricReport validation failed: {errors}") - - def minimize(self) -> "FabricReport": - """ - Return a minimal copy of this FabricReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricReport instance with only the minimum required fields. - """ - self.validate() - return FabricReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricReport": - """ - Create a :class:`RelatedFabricReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricReport(guid=self.guid) - return RelatedFabricReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -556,9 +488,6 @@ def _fabric_report_to_nested(fabric_report: FabricReport) -> FabricReportNested: is_incomplete=fabric_report.is_incomplete, provenance_type=fabric_report.provenance_type, home_id=fabric_report.home_id, - depth=fabric_report.depth, - immediate_upstream=fabric_report.immediate_upstream, - immediate_downstream=fabric_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +521,7 @@ def _fabric_report_from_nested(nested: FabricReportNested) -> FabricReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +530,6 @@ def _fabric_report_from_nested(nested: FabricReportNested) -> FabricReport: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_semantic_model.py b/pyatlan_v9/model/assets/fabric_semantic_model.py index 23071425f..a718f0de6 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model.py @@ -40,11 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from 
.fabric_related import ( - RelatedFabricSemanticModel, - RelatedFabricSemanticModelTable, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricSemanticModelTable, RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +97,8 @@ class FabricSemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModel" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -219,72 +217,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricSemanticModel validation failed: {errors}") - - def minimize(self) -> "FabricSemanticModel": - """ - Return a minimal copy of this FabricSemanticModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModel instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricSemanticModel": - """ - Create a :class:`RelatedFabricSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricSemanticModel(guid=self.guid) - return RelatedFabricSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -566,9 +498,6 @@ def _fabric_semantic_model_to_nested( is_incomplete=fabric_semantic_model.is_incomplete, provenance_type=fabric_semantic_model.provenance_type, home_id=fabric_semantic_model.home_id, - depth=fabric_semantic_model.depth, - immediate_upstream=fabric_semantic_model.immediate_upstream, - immediate_downstream=fabric_semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -604,6 +533,7 @@ def _fabric_semantic_model_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -612,9 +542,6 @@ def _fabric_semantic_model_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_semantic_model_table.py b/pyatlan_v9/model/assets/fabric_semantic_model_table.py index fb3942bce..0a8baccf7 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model_table.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model_table.py @@ -42,7 +42,6 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .fabric_related import ( RelatedFabricSemanticModel, - 
RelatedFabricSemanticModelTable, RelatedFabricSemanticModelTableColumn, ) from .gtc_related import RelatedAtlasGlossaryTerm @@ -102,6 +101,8 @@ class FabricSemanticModelTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModelTable" + fabric_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric semantic model that contains this asset.""" @@ -225,78 +226,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModelTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_semantic_model is UNSET: - errors.append("fabric_semantic_model is required for creation") - if self.fabric_semantic_model_qualified_name is UNSET: - errors.append( - "fabric_semantic_model_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FabricSemanticModelTable validation failed: {errors}") - - def minimize(self) -> "FabricSemanticModelTable": - """ - Return a minimal copy of this FabricSemanticModelTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModelTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModelTable instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModelTable( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricSemanticModelTable": - """ - Create a :class:`RelatedFabricSemanticModelTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModelTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricSemanticModelTable(guid=self.guid) - return RelatedFabricSemanticModelTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -589,9 +518,6 @@ def _fabric_semantic_model_table_to_nested( is_incomplete=fabric_semantic_model_table.is_incomplete, provenance_type=fabric_semantic_model_table.provenance_type, home_id=fabric_semantic_model_table.home_id, - depth=fabric_semantic_model_table.depth, - immediate_upstream=fabric_semantic_model_table.immediate_upstream, - immediate_downstream=fabric_semantic_model_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -627,6 +553,7 @@ def _fabric_semantic_model_table_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -635,9 +562,6 @@ def _fabric_semantic_model_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py b/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py index 8de4815a5..f88afe31d 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py @@ -40,10 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from 
.data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricSemanticModelTable, - RelatedFabricSemanticModelTableColumn, -) +from .fabric_related import RelatedFabricSemanticModelTable from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +98,8 @@ class FabricSemanticModelTableColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModelTableColumn" + fabric_semantic_model_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric semantic model table that contains this asset.""" @@ -224,84 +223,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModelTableColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_semantic_model_table is UNSET: - errors.append("fabric_semantic_model_table is required for creation") - if self.fabric_semantic_model_table_name is UNSET: - errors.append( - "fabric_semantic_model_table_name is required for creation" - ) - if self.fabric_semantic_model_table_qualified_name is UNSET: - errors.append( - "fabric_semantic_model_table_qualified_name is required for creation" - ) - if errors: - raise ValueError( - f"FabricSemanticModelTableColumn validation failed: {errors}" - ) - - def minimize(self) -> "FabricSemanticModelTableColumn": - """ - Return a minimal copy of this FabricSemanticModelTableColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModelTableColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModelTableColumn instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModelTableColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricSemanticModelTableColumn": - """ - Create a :class:`RelatedFabricSemanticModelTableColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModelTableColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabricSemanticModelTableColumn(guid=self.guid) - return RelatedFabricSemanticModelTableColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -597,9 +518,6 @@ def _fabric_semantic_model_table_column_to_nested( is_incomplete=fabric_semantic_model_table_column.is_incomplete, provenance_type=fabric_semantic_model_table_column.provenance_type, home_id=fabric_semantic_model_table_column.home_id, - depth=fabric_semantic_model_table_column.depth, - immediate_upstream=fabric_semantic_model_table_column.immediate_upstream, - immediate_downstream=fabric_semantic_model_table_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -635,6 +553,7 @@ def _fabric_semantic_model_table_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -643,9 +562,6 @@ def _fabric_semantic_model_table_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_table_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_visual.py b/pyatlan_v9/model/assets/fabric_visual.py index 42944b82f..2fbdf909c 100644 --- 
a/pyatlan_v9/model/assets/fabric_visual.py +++ b/pyatlan_v9/model/assets/fabric_visual.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricPage, RelatedFabricVisual +from .fabric_related import RelatedFabricPage from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +99,8 @@ class FabricVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricVisual" + fabric_page_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric page that contains this asset.""" @@ -223,76 +225,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_page is UNSET: - errors.append("fabric_page is required for creation") - if self.fabric_page_name is UNSET: - errors.append("fabric_page_name is required for creation") - if self.fabric_page_qualified_name is UNSET: - errors.append("fabric_page_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricVisual validation failed: {errors}") - - def minimize(self) -> "FabricVisual": - """ - Return a minimal copy of this FabricVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricVisual instance with only the minimum required fields. - """ - self.validate() - return FabricVisual(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricVisual": - """ - Create a :class:`RelatedFabricVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricVisual reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricVisual(guid=self.guid) - return RelatedFabricVisual(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -577,9 +509,6 @@ def _fabric_visual_to_nested(fabric_visual: FabricVisual) -> FabricVisualNested: is_incomplete=fabric_visual.is_incomplete, provenance_type=fabric_visual.provenance_type, home_id=fabric_visual.home_id, - depth=fabric_visual.depth, - immediate_upstream=fabric_visual.immediate_upstream, - immediate_downstream=fabric_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -613,6 +542,7 @@ def _fabric_visual_from_nested(nested: FabricVisualNested) -> FabricVisual: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -621,9 +551,6 @@ def _fabric_visual_from_nested(nested: FabricVisualNested) -> FabricVisual: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_visual_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fabric_workspace.py b/pyatlan_v9/model/assets/fabric_workspace.py index 8a9c8075a..ecd3d09e6 100644 --- a/pyatlan_v9/model/assets/fabric_workspace.py +++ b/pyatlan_v9/model/assets/fabric_workspace.py @@ -45,7 +45,6 @@ RelatedFabricDataPipeline, RelatedFabricReport, RelatedFabricSemanticModel, - RelatedFabricWorkspace, ) from .gtc_related import RelatedAtlasGlossaryTerm 
from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -108,6 +107,8 @@ class FabricWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricWorkspace" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -234,66 +235,6 @@ class FabricWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "FabricWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FabricWorkspace validation failed: {errors}") - - def minimize(self) -> "FabricWorkspace": - """ - Return a minimal copy of this FabricWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new FabricWorkspace instance with only the minimum required fields. - """ - self.validate() - return FabricWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricWorkspace": - """ - Create a :class:`RelatedFabricWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricWorkspace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabricWorkspace(guid=self.guid) - return RelatedFabricWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -593,9 +534,6 @@ def _fabric_workspace_to_nested( is_incomplete=fabric_workspace.is_incomplete, provenance_type=fabric_workspace.provenance_type, home_id=fabric_workspace.home_id, - depth=fabric_workspace.depth, - immediate_upstream=fabric_workspace.immediate_upstream, - immediate_downstream=fabric_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -629,6 +567,7 @@ def _fabric_workspace_from_nested(nested: FabricWorkspaceNested) -> FabricWorksp updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -637,9 +576,6 @@ def _fabric_workspace_from_nested(nested: FabricWorkspaceNested) -> FabricWorksp is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_fabric_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/file.py b/pyatlan_v9/model/assets/file.py index 68bd98d64..57dcfd6d1 100644 --- a/pyatlan_v9/model/assets/file.py +++ b/pyatlan_v9/model/assets/file.py @@ -99,6 +99,8 @@ class File(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "File" + file_type: Union[str, None, UnsetType] = UNSET """Type (extension) of the file.""" @@ -215,69 +217,6 @@ class File(Asset): def __post_init__(self) -> None: self.type_name = "File" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this File instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.file_type is UNSET: - errors.append("file_type is required for creation") - if errors: - raise ValueError(f"File validation failed: {errors}") - - def minimize(self) -> "File": - """ - Return a minimal copy of this File with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new File with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new File instance with only the minimum required fields. - """ - self.validate() - return File(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFile": - """ - Create a :class:`RelatedFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFile(guid=self.guid) - return RelatedFile(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -640,9 +579,6 @@ def _file_to_nested(file: File) -> FileNested: is_incomplete=file.is_incomplete, provenance_type=file.provenance_type, home_id=file.home_id, - depth=file.depth, - immediate_upstream=file.immediate_upstream, - immediate_downstream=file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -672,6 +608,7 @@ def _file_from_nested(nested: FileNested) -> File: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -680,9 +617,6 @@ def _file_from_nested(nested: FileNested) -> File: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_file_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fivetran.py b/pyatlan_v9/model/assets/fivetran.py index 03def8569..3e557b7ca 100644 --- a/pyatlan_v9/model/assets/fivetran.py +++ b/pyatlan_v9/model/assets/fivetran.py @@ -39,7 +39,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fivetran_related import RelatedFivetran from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +93,8 @@ class Fivetran(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = 
"Fivetran" + fivetran_workflow_name: Union[str, None, UnsetType] = UNSET """Name of the atlan fivetran workflow that updated this asset""" @@ -198,66 +199,6 @@ class Fivetran(Asset): def __post_init__(self) -> None: self.type_name = "Fivetran" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Fivetran instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Fivetran validation failed: {errors}") - - def minimize(self) -> "Fivetran": - """ - Return a minimal copy of this Fivetran with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Fivetran with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Fivetran instance with only the minimum required fields. 
- """ - self.validate() - return Fivetran(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFivetran": - """ - Create a :class:`RelatedFivetran` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFivetran reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFivetran(guid=self.guid) - return RelatedFivetran(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -521,9 +462,6 @@ def _fivetran_to_nested(fivetran: Fivetran) -> FivetranNested: is_incomplete=fivetran.is_incomplete, provenance_type=fivetran.provenance_type, home_id=fivetran.home_id, - depth=fivetran.depth, - immediate_upstream=fivetran.immediate_upstream, - immediate_downstream=fivetran.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -555,6 +493,7 @@ def _fivetran_from_nested(nested: FivetranNested) -> Fivetran: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -563,9 +502,6 @@ def _fivetran_from_nested(nested: FivetranNested) -> Fivetran: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fivetran_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/fivetran_connector.py 
b/pyatlan_v9/model/assets/fivetran_connector.py index 28f637988..d3717823d 100644 --- a/pyatlan_v9/model/assets/fivetran_connector.py +++ b/pyatlan_v9/model/assets/fivetran_connector.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fivetran_related import RelatedFivetranConnector from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -63,50 +62,44 @@ class FivetranConnector(Asset): Instance of a Fivetran connector asset in Atlan. """ - FIVETRAN_CONNECTOR_LAST_SYNC_ID: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_STARTED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_FINISHED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_REASON: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TASK_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_RESCHEDULED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TABLES_SYNCED: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TOTAL_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_URL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_URL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_ON: 
ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_FREQUENCY: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_PAUSED: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_USER_FULL_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_USER_EMAIL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = ( - None - ) - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = ( - None - ) - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT: ClassVar[ - Any - ] = None - FIVETRAN_CONNECTOR_TOTAL_TABLES_SYNCED: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_ID: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_STARTED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_FINISHED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_REASON: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TASK_TYPE: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_RESCHEDULED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TABLES_SYNCED: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_EXTRACT_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_LOAD_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_LOAD_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_PROCESS_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_PROCESS_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TOTAL_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_NAME: ClassVar[Any] = None + FIVETRAN_TYPE: ClassVar[Any] = None + 
FIVETRAN_URL: ClassVar[Any] = None + FIVETRAN_DESTINATION_NAME: ClassVar[Any] = None + FIVETRAN_DESTINATION_TYPE: ClassVar[Any] = None + FIVETRAN_DESTINATION_URL: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_ON: ClassVar[Any] = None + FIVETRAN_SYNC_FREQUENCY: ClassVar[Any] = None + FIVETRAN_SYNC_PAUSED: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_USER_FULL_NAME: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_USER_EMAIL: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_TOTAL_TABLES_SYNCED: ClassVar[Any] = None FIVETRAN_CONNECTOR_TOP_TABLES_BY_MAR: ClassVar[Any] = None - FIVETRAN_CONNECTOR_USAGE_COST: ClassVar[Any] = None - FIVETRAN_CONNECTOR_CREDITS_USED: ClassVar[Any] = None + FIVETRAN_USAGE_COST: ClassVar[Any] = None + FIVETRAN_CREDITS_USED: ClassVar[Any] = None FIVETRAN_WORKFLOW_NAME: ClassVar[Any] = None FIVETRAN_LAST_SYNC_STATUS: ClassVar[Any] = None FIVETRAN_LAST_SYNC_RECORDS_UPDATED: ClassVar[Any] = None @@ -140,139 +133,127 @@ class FivetranConnector(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "FivetranConnector" + + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + 
fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. 
If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + 
fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. 
Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + 
fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -280,10 +261,10 @@ class FivetranConnector(Asset): ) """Total five tables sorted by MAR synced by 
this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" fivetran_workflow_name: Union[str, None, UnsetType] = UNSET @@ -393,66 +374,6 @@ class FivetranConnector(Asset): def __post_init__(self) -> None: self.type_name = "FivetranConnector" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FivetranConnector instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FivetranConnector validation failed: {errors}") - - def minimize(self) -> "FivetranConnector": - """ - Return a minimal copy of this FivetranConnector with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FivetranConnector with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FivetranConnector instance with only the minimum required fields. - """ - self.validate() - return FivetranConnector(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFivetranConnector": - """ - Create a :class:`RelatedFivetranConnector` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFivetranConnector reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFivetranConnector(guid=self.guid) - return RelatedFivetranConnector(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -510,139 +431,125 @@ def from_json( class FivetranConnectorAttributes(AssetAttributes): """FivetranConnector-specific attributes for nested API format.""" - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + 
fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - 
float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. 
Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + 
fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -650,10 +557,10 @@ class FivetranConnectorAttributes(AssetAttributes): ) """Total five tables 
sorted by MAR synced by this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" fivetran_workflow_name: Union[str, None, UnsetType] = UNSET @@ -823,94 +730,72 @@ def _populate_fivetran_connector_attrs( ) -> None: """Populate FivetranConnector-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.fivetran_connector_last_sync_id = obj.fivetran_connector_last_sync_id - attrs.fivetran_connector_last_sync_started_at = ( - obj.fivetran_connector_last_sync_started_at - ) - attrs.fivetran_connector_last_sync_finished_at = ( - obj.fivetran_connector_last_sync_finished_at - ) - attrs.fivetran_connector_last_sync_reason = obj.fivetran_connector_last_sync_reason - attrs.fivetran_connector_last_sync_task_type = ( - obj.fivetran_connector_last_sync_task_type - ) - attrs.fivetran_connector_last_sync_rescheduled_at = ( - obj.fivetran_connector_last_sync_rescheduled_at - ) - attrs.fivetran_connector_last_sync_tables_synced = ( - obj.fivetran_connector_last_sync_tables_synced - ) - attrs.fivetran_connector_last_sync_extract_time_seconds = ( - obj.fivetran_connector_last_sync_extract_time_seconds - ) - attrs.fivetran_connector_last_sync_extract_volume_megabytes = ( - obj.fivetran_connector_last_sync_extract_volume_megabytes - ) - attrs.fivetran_connector_last_sync_load_time_seconds = ( - obj.fivetran_connector_last_sync_load_time_seconds - ) - attrs.fivetran_connector_last_sync_load_volume_megabytes = ( - obj.fivetran_connector_last_sync_load_volume_megabytes - ) - attrs.fivetran_connector_last_sync_process_time_seconds = ( - obj.fivetran_connector_last_sync_process_time_seconds - ) - 
attrs.fivetran_connector_last_sync_process_volume_megabytes = ( - obj.fivetran_connector_last_sync_process_volume_megabytes - ) - attrs.fivetran_connector_last_sync_total_time_seconds = ( - obj.fivetran_connector_last_sync_total_time_seconds - ) - attrs.fivetran_connector_name = obj.fivetran_connector_name - attrs.fivetran_connector_type = obj.fivetran_connector_type - attrs.fivetran_connector_url = obj.fivetran_connector_url - attrs.fivetran_connector_destination_name = obj.fivetran_connector_destination_name - attrs.fivetran_connector_destination_type = obj.fivetran_connector_destination_type - attrs.fivetran_connector_destination_url = obj.fivetran_connector_destination_url - attrs.fivetran_connector_sync_setup_on = obj.fivetran_connector_sync_setup_on - attrs.fivetran_connector_sync_frequency = obj.fivetran_connector_sync_frequency - attrs.fivetran_connector_sync_paused = obj.fivetran_connector_sync_paused - attrs.fivetran_connector_sync_setup_user_full_name = ( - obj.fivetran_connector_sync_setup_user_full_name - ) - attrs.fivetran_connector_sync_setup_user_email = ( - obj.fivetran_connector_sync_setup_user_email - ) - attrs.fivetran_connector_monthly_active_rows_free = ( - obj.fivetran_connector_monthly_active_rows_free - ) - attrs.fivetran_connector_monthly_active_rows_paid = ( - obj.fivetran_connector_monthly_active_rows_paid - ) - attrs.fivetran_connector_monthly_active_rows_total = ( - obj.fivetran_connector_monthly_active_rows_total - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_free = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_free - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_paid = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_paid - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_total = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_total - ) - attrs.fivetran_connector_monthly_active_rows_free_percentage_of_account = ( - 
obj.fivetran_connector_monthly_active_rows_free_percentage_of_account - ) - attrs.fivetran_connector_monthly_active_rows_paid_percentage_of_account = ( - obj.fivetran_connector_monthly_active_rows_paid_percentage_of_account - ) - attrs.fivetran_connector_monthly_active_rows_total_percentage_of_account = ( - obj.fivetran_connector_monthly_active_rows_total_percentage_of_account - ) - attrs.fivetran_connector_total_tables_synced = ( - obj.fivetran_connector_total_tables_synced - ) + attrs.fivetran_last_sync_id = obj.fivetran_last_sync_id + attrs.fivetran_last_sync_started_at = obj.fivetran_last_sync_started_at + attrs.fivetran_last_sync_finished_at = obj.fivetran_last_sync_finished_at + attrs.fivetran_last_sync_reason = obj.fivetran_last_sync_reason + attrs.fivetran_last_sync_task_type = obj.fivetran_last_sync_task_type + attrs.fivetran_last_sync_rescheduled_at = obj.fivetran_last_sync_rescheduled_at + attrs.fivetran_last_sync_tables_synced = obj.fivetran_last_sync_tables_synced + attrs.fivetran_last_sync_extract_time_seconds = ( + obj.fivetran_last_sync_extract_time_seconds + ) + attrs.fivetran_last_sync_extract_volume_megabytes = ( + obj.fivetran_last_sync_extract_volume_megabytes + ) + attrs.fivetran_last_sync_load_time_seconds = ( + obj.fivetran_last_sync_load_time_seconds + ) + attrs.fivetran_last_sync_load_volume_megabytes = ( + obj.fivetran_last_sync_load_volume_megabytes + ) + attrs.fivetran_last_sync_process_time_seconds = ( + obj.fivetran_last_sync_process_time_seconds + ) + attrs.fivetran_last_sync_process_volume_megabytes = ( + obj.fivetran_last_sync_process_volume_megabytes + ) + attrs.fivetran_last_sync_total_time_seconds = ( + obj.fivetran_last_sync_total_time_seconds + ) + attrs.fivetran_name = obj.fivetran_name + attrs.fivetran_type = obj.fivetran_type + attrs.fivetran_url = obj.fivetran_url + attrs.fivetran_destination_name = obj.fivetran_destination_name + attrs.fivetran_destination_type = obj.fivetran_destination_type + 
attrs.fivetran_destination_url = obj.fivetran_destination_url + attrs.fivetran_sync_setup_on = obj.fivetran_sync_setup_on + attrs.fivetran_sync_frequency = obj.fivetran_sync_frequency + attrs.fivetran_sync_paused = obj.fivetran_sync_paused + attrs.fivetran_sync_setup_user_full_name = obj.fivetran_sync_setup_user_full_name + attrs.fivetran_sync_setup_user_email = obj.fivetran_sync_setup_user_email + attrs.fivetran_monthly_active_rows_free = obj.fivetran_monthly_active_rows_free + attrs.fivetran_monthly_active_rows_paid = obj.fivetran_monthly_active_rows_paid + attrs.fivetran_monthly_active_rows_total = obj.fivetran_monthly_active_rows_total + attrs.fivetran_monthly_active_rows_change_percentage_free = ( + obj.fivetran_monthly_active_rows_change_percentage_free + ) + attrs.fivetran_monthly_active_rows_change_percentage_paid = ( + obj.fivetran_monthly_active_rows_change_percentage_paid + ) + attrs.fivetran_monthly_active_rows_change_percentage_total = ( + obj.fivetran_monthly_active_rows_change_percentage_total + ) + attrs.fivetran_monthly_active_rows_free_percentage_of_account = ( + obj.fivetran_monthly_active_rows_free_percentage_of_account + ) + attrs.fivetran_monthly_active_rows_paid_percentage_of_account = ( + obj.fivetran_monthly_active_rows_paid_percentage_of_account + ) + attrs.fivetran_monthly_active_rows_total_percentage_of_account = ( + obj.fivetran_monthly_active_rows_total_percentage_of_account + ) + attrs.fivetran_total_tables_synced = obj.fivetran_total_tables_synced attrs.fivetran_connector_top_tables_by_mar = ( obj.fivetran_connector_top_tables_by_mar ) - attrs.fivetran_connector_usage_cost = obj.fivetran_connector_usage_cost - attrs.fivetran_connector_credits_used = obj.fivetran_connector_credits_used + attrs.fivetran_usage_cost = obj.fivetran_usage_cost + attrs.fivetran_credits_used = obj.fivetran_credits_used attrs.fivetran_workflow_name = obj.fivetran_workflow_name attrs.fivetran_last_sync_status = obj.fivetran_last_sync_status 
attrs.fivetran_last_sync_records_updated = obj.fivetran_last_sync_records_updated @@ -919,104 +804,82 @@ def _populate_fivetran_connector_attrs( def _extract_fivetran_connector_attrs(attrs: FivetranConnectorAttributes) -> dict: """Extract all FivetranConnector attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["fivetran_connector_last_sync_id"] = attrs.fivetran_connector_last_sync_id - result["fivetran_connector_last_sync_started_at"] = ( - attrs.fivetran_connector_last_sync_started_at - ) - result["fivetran_connector_last_sync_finished_at"] = ( - attrs.fivetran_connector_last_sync_finished_at - ) - result["fivetran_connector_last_sync_reason"] = ( - attrs.fivetran_connector_last_sync_reason - ) - result["fivetran_connector_last_sync_task_type"] = ( - attrs.fivetran_connector_last_sync_task_type - ) - result["fivetran_connector_last_sync_rescheduled_at"] = ( - attrs.fivetran_connector_last_sync_rescheduled_at - ) - result["fivetran_connector_last_sync_tables_synced"] = ( - attrs.fivetran_connector_last_sync_tables_synced - ) - result["fivetran_connector_last_sync_extract_time_seconds"] = ( - attrs.fivetran_connector_last_sync_extract_time_seconds - ) - result["fivetran_connector_last_sync_extract_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_extract_volume_megabytes - ) - result["fivetran_connector_last_sync_load_time_seconds"] = ( - attrs.fivetran_connector_last_sync_load_time_seconds + result["fivetran_last_sync_id"] = attrs.fivetran_last_sync_id + result["fivetran_last_sync_started_at"] = attrs.fivetran_last_sync_started_at + result["fivetran_last_sync_finished_at"] = attrs.fivetran_last_sync_finished_at + result["fivetran_last_sync_reason"] = attrs.fivetran_last_sync_reason + result["fivetran_last_sync_task_type"] = attrs.fivetran_last_sync_task_type + result["fivetran_last_sync_rescheduled_at"] = ( + attrs.fivetran_last_sync_rescheduled_at ) - 
result["fivetran_connector_last_sync_load_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_load_volume_megabytes + result["fivetran_last_sync_tables_synced"] = attrs.fivetran_last_sync_tables_synced + result["fivetran_last_sync_extract_time_seconds"] = ( + attrs.fivetran_last_sync_extract_time_seconds ) - result["fivetran_connector_last_sync_process_time_seconds"] = ( - attrs.fivetran_connector_last_sync_process_time_seconds + result["fivetran_last_sync_extract_volume_megabytes"] = ( + attrs.fivetran_last_sync_extract_volume_megabytes ) - result["fivetran_connector_last_sync_process_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_process_volume_megabytes + result["fivetran_last_sync_load_time_seconds"] = ( + attrs.fivetran_last_sync_load_time_seconds ) - result["fivetran_connector_last_sync_total_time_seconds"] = ( - attrs.fivetran_connector_last_sync_total_time_seconds + result["fivetran_last_sync_load_volume_megabytes"] = ( + attrs.fivetran_last_sync_load_volume_megabytes ) - result["fivetran_connector_name"] = attrs.fivetran_connector_name - result["fivetran_connector_type"] = attrs.fivetran_connector_type - result["fivetran_connector_url"] = attrs.fivetran_connector_url - result["fivetran_connector_destination_name"] = ( - attrs.fivetran_connector_destination_name + result["fivetran_last_sync_process_time_seconds"] = ( + attrs.fivetran_last_sync_process_time_seconds ) - result["fivetran_connector_destination_type"] = ( - attrs.fivetran_connector_destination_type + result["fivetran_last_sync_process_volume_megabytes"] = ( + attrs.fivetran_last_sync_process_volume_megabytes ) - result["fivetran_connector_destination_url"] = ( - attrs.fivetran_connector_destination_url + result["fivetran_last_sync_total_time_seconds"] = ( + attrs.fivetran_last_sync_total_time_seconds ) - result["fivetran_connector_sync_setup_on"] = attrs.fivetran_connector_sync_setup_on - result["fivetran_connector_sync_frequency"] = ( - 
attrs.fivetran_connector_sync_frequency + result["fivetran_name"] = attrs.fivetran_name + result["fivetran_type"] = attrs.fivetran_type + result["fivetran_url"] = attrs.fivetran_url + result["fivetran_destination_name"] = attrs.fivetran_destination_name + result["fivetran_destination_type"] = attrs.fivetran_destination_type + result["fivetran_destination_url"] = attrs.fivetran_destination_url + result["fivetran_sync_setup_on"] = attrs.fivetran_sync_setup_on + result["fivetran_sync_frequency"] = attrs.fivetran_sync_frequency + result["fivetran_sync_paused"] = attrs.fivetran_sync_paused + result["fivetran_sync_setup_user_full_name"] = ( + attrs.fivetran_sync_setup_user_full_name ) - result["fivetran_connector_sync_paused"] = attrs.fivetran_connector_sync_paused - result["fivetran_connector_sync_setup_user_full_name"] = ( - attrs.fivetran_connector_sync_setup_user_full_name + result["fivetran_sync_setup_user_email"] = attrs.fivetran_sync_setup_user_email + result["fivetran_monthly_active_rows_free"] = ( + attrs.fivetran_monthly_active_rows_free ) - result["fivetran_connector_sync_setup_user_email"] = ( - attrs.fivetran_connector_sync_setup_user_email + result["fivetran_monthly_active_rows_paid"] = ( + attrs.fivetran_monthly_active_rows_paid ) - result["fivetran_connector_monthly_active_rows_free"] = ( - attrs.fivetran_connector_monthly_active_rows_free + result["fivetran_monthly_active_rows_total"] = ( + attrs.fivetran_monthly_active_rows_total ) - result["fivetran_connector_monthly_active_rows_paid"] = ( - attrs.fivetran_connector_monthly_active_rows_paid + result["fivetran_monthly_active_rows_change_percentage_free"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_free ) - result["fivetran_connector_monthly_active_rows_total"] = ( - attrs.fivetran_connector_monthly_active_rows_total + result["fivetran_monthly_active_rows_change_percentage_paid"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_paid ) - 
result["fivetran_connector_monthly_active_rows_change_percentage_free"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_free + result["fivetran_monthly_active_rows_change_percentage_total"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_total ) - result["fivetran_connector_monthly_active_rows_change_percentage_paid"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_paid + result["fivetran_monthly_active_rows_free_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_free_percentage_of_account ) - result["fivetran_connector_monthly_active_rows_change_percentage_total"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_total + result["fivetran_monthly_active_rows_paid_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_paid_percentage_of_account ) - result["fivetran_connector_monthly_active_rows_free_percentage_of_account"] = ( - attrs.fivetran_connector_monthly_active_rows_free_percentage_of_account - ) - result["fivetran_connector_monthly_active_rows_paid_percentage_of_account"] = ( - attrs.fivetran_connector_monthly_active_rows_paid_percentage_of_account - ) - result["fivetran_connector_monthly_active_rows_total_percentage_of_account"] = ( - attrs.fivetran_connector_monthly_active_rows_total_percentage_of_account - ) - result["fivetran_connector_total_tables_synced"] = ( - attrs.fivetran_connector_total_tables_synced + result["fivetran_monthly_active_rows_total_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_total_percentage_of_account ) + result["fivetran_total_tables_synced"] = attrs.fivetran_total_tables_synced result["fivetran_connector_top_tables_by_mar"] = ( attrs.fivetran_connector_top_tables_by_mar ) - result["fivetran_connector_usage_cost"] = attrs.fivetran_connector_usage_cost - result["fivetran_connector_credits_used"] = attrs.fivetran_connector_credits_used + result["fivetran_usage_cost"] = attrs.fivetran_usage_cost + 
result["fivetran_credits_used"] = attrs.fivetran_credits_used result["fivetran_workflow_name"] = attrs.fivetran_workflow_name result["fivetran_last_sync_status"] = attrs.fivetran_last_sync_status result["fivetran_last_sync_records_updated"] = ( @@ -1062,9 +925,6 @@ def _fivetran_connector_to_nested( is_incomplete=fivetran_connector.is_incomplete, provenance_type=fivetran_connector.provenance_type, home_id=fivetran_connector.home_id, - depth=fivetran_connector.depth, - immediate_upstream=fivetran_connector.immediate_upstream, - immediate_downstream=fivetran_connector.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1100,6 +960,7 @@ def _fivetran_connector_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1108,9 +969,6 @@ def _fivetran_connector_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fivetran_connector_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1143,146 +1001,125 @@ def _fivetran_connector_from_nested_bytes( RelationField, ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_ID = KeywordField( - "fivetranConnectorLastSyncId", "fivetranConnectorLastSyncId" -) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_STARTED_AT = NumericField( - "fivetranConnectorLastSyncStartedAt", "fivetranConnectorLastSyncStartedAt" -) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_FINISHED_AT = NumericField( - "fivetranConnectorLastSyncFinishedAt", "fivetranConnectorLastSyncFinishedAt" -) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_REASON = KeywordTextField( - 
"fivetranConnectorLastSyncReason", - "fivetranConnectorLastSyncReason", - "fivetranConnectorLastSyncReason.text", +FivetranConnector.FIVETRAN_LAST_SYNC_ID = KeywordField( + "fivetranLastSyncId", "fivetranLastSyncId" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TASK_TYPE = KeywordField( - "fivetranConnectorLastSyncTaskType", "fivetranConnectorLastSyncTaskType" +FivetranConnector.FIVETRAN_LAST_SYNC_STARTED_AT = NumericField( + "fivetranLastSyncStartedAt", "fivetranLastSyncStartedAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_RESCHEDULED_AT = NumericField( - "fivetranConnectorLastSyncRescheduledAt", "fivetranConnectorLastSyncRescheduledAt" +FivetranConnector.FIVETRAN_LAST_SYNC_FINISHED_AT = NumericField( + "fivetranLastSyncFinishedAt", "fivetranLastSyncFinishedAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TABLES_SYNCED = NumericField( - "fivetranConnectorLastSyncTablesSynced", "fivetranConnectorLastSyncTablesSynced" +FivetranConnector.FIVETRAN_LAST_SYNC_REASON = KeywordTextField( + "fivetranLastSyncReason", "fivetranLastSyncReason", "fivetranLastSyncReason.text" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncExtractTimeSeconds", - "fivetranConnectorLastSyncExtractTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_TASK_TYPE = KeywordField( + "fivetranLastSyncTaskType", "fivetranLastSyncTaskType" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncExtractVolumeMegabytes", - "fivetranConnectorLastSyncExtractVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_RESCHEDULED_AT = NumericField( + "fivetranLastSyncRescheduledAt", "fivetranLastSyncRescheduledAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncLoadTimeSeconds", - "fivetranConnectorLastSyncLoadTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_TABLES_SYNCED = NumericField( + 
"fivetranLastSyncTablesSynced", "fivetranLastSyncTablesSynced" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncLoadVolumeMegabytes", - "fivetranConnectorLastSyncLoadVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_EXTRACT_TIME_SECONDS = NumericField( + "fivetranLastSyncExtractTimeSeconds", "fivetranLastSyncExtractTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncProcessTimeSeconds", - "fivetranConnectorLastSyncProcessTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncExtractVolumeMegabytes", "fivetranLastSyncExtractVolumeMegabytes" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncProcessVolumeMegabytes", - "fivetranConnectorLastSyncProcessVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_LOAD_TIME_SECONDS = NumericField( + "fivetranLastSyncLoadTimeSeconds", "fivetranLastSyncLoadTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TOTAL_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncTotalTimeSeconds", - "fivetranConnectorLastSyncTotalTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_LOAD_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncLoadVolumeMegabytes", "fivetranLastSyncLoadVolumeMegabytes" ) -FivetranConnector.FIVETRAN_CONNECTOR_NAME = KeywordField( - "fivetranConnectorName", "fivetranConnectorName" +FivetranConnector.FIVETRAN_LAST_SYNC_PROCESS_TIME_SECONDS = NumericField( + "fivetranLastSyncProcessTimeSeconds", "fivetranLastSyncProcessTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_TYPE = KeywordField( - "fivetranConnectorType", "fivetranConnectorType" +FivetranConnector.FIVETRAN_LAST_SYNC_PROCESS_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncProcessVolumeMegabytes", "fivetranLastSyncProcessVolumeMegabytes" ) 
-FivetranConnector.FIVETRAN_CONNECTOR_URL = KeywordField( - "fivetranConnectorURL", "fivetranConnectorURL" +FivetranConnector.FIVETRAN_LAST_SYNC_TOTAL_TIME_SECONDS = NumericField( + "fivetranLastSyncTotalTimeSeconds", "fivetranLastSyncTotalTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_NAME = KeywordField( - "fivetranConnectorDestinationName", "fivetranConnectorDestinationName" +FivetranConnector.FIVETRAN_NAME = KeywordField("fivetranName", "fivetranName") +FivetranConnector.FIVETRAN_TYPE = KeywordField("fivetranType", "fivetranType") +FivetranConnector.FIVETRAN_URL = KeywordField("fivetranURL", "fivetranURL") +FivetranConnector.FIVETRAN_DESTINATION_NAME = KeywordField( + "fivetranDestinationName", "fivetranDestinationName" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_TYPE = KeywordField( - "fivetranConnectorDestinationType", "fivetranConnectorDestinationType" +FivetranConnector.FIVETRAN_DESTINATION_TYPE = KeywordField( + "fivetranDestinationType", "fivetranDestinationType" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_URL = KeywordField( - "fivetranConnectorDestinationURL", "fivetranConnectorDestinationURL" +FivetranConnector.FIVETRAN_DESTINATION_URL = KeywordField( + "fivetranDestinationURL", "fivetranDestinationURL" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_ON = NumericField( - "fivetranConnectorSyncSetupOn", "fivetranConnectorSyncSetupOn" +FivetranConnector.FIVETRAN_SYNC_SETUP_ON = NumericField( + "fivetranSyncSetupOn", "fivetranSyncSetupOn" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_FREQUENCY = KeywordField( - "fivetranConnectorSyncFrequency", "fivetranConnectorSyncFrequency" +FivetranConnector.FIVETRAN_SYNC_FREQUENCY = KeywordField( + "fivetranSyncFrequency", "fivetranSyncFrequency" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_PAUSED = BooleanField( - "fivetranConnectorSyncPaused", "fivetranConnectorSyncPaused" +FivetranConnector.FIVETRAN_SYNC_PAUSED = BooleanField( + "fivetranSyncPaused", "fivetranSyncPaused" ) 
-FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_USER_FULL_NAME = KeywordField( - "fivetranConnectorSyncSetupUserFullName", "fivetranConnectorSyncSetupUserFullName" +FivetranConnector.FIVETRAN_SYNC_SETUP_USER_FULL_NAME = KeywordField( + "fivetranSyncSetupUserFullName", "fivetranSyncSetupUserFullName" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_USER_EMAIL = KeywordField( - "fivetranConnectorSyncSetupUserEmail", "fivetranConnectorSyncSetupUserEmail" +FivetranConnector.FIVETRAN_SYNC_SETUP_USER_EMAIL = KeywordField( + "fivetranSyncSetupUserEmail", "fivetranSyncSetupUserEmail" ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE = NumericField( - "fivetranConnectorMonthlyActiveRowsFree", "fivetranConnectorMonthlyActiveRowsFree" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE = NumericField( + "fivetranMonthlyActiveRowsFree", "fivetranMonthlyActiveRowsFree" ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID = NumericField( - "fivetranConnectorMonthlyActiveRowsPaid", "fivetranConnectorMonthlyActiveRowsPaid" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID = NumericField( + "fivetranMonthlyActiveRowsPaid", "fivetranMonthlyActiveRowsPaid" ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL = NumericField( - "fivetranConnectorMonthlyActiveRowsTotal", "fivetranConnectorMonthlyActiveRowsTotal" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL = NumericField( + "fivetranMonthlyActiveRowsTotal", "fivetranMonthlyActiveRowsTotal" ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE = ( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentageFree", - "fivetranConnectorMonthlyActiveRowsChangePercentageFree", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE = NumericField( + "fivetranMonthlyActiveRowsChangePercentageFree", + "fivetranMonthlyActiveRowsChangePercentageFree", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID = 
( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentagePaid", - "fivetranConnectorMonthlyActiveRowsChangePercentagePaid", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID = NumericField( + "fivetranMonthlyActiveRowsChangePercentagePaid", + "fivetranMonthlyActiveRowsChangePercentagePaid", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL = ( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentageTotal", - "fivetranConnectorMonthlyActiveRowsChangePercentageTotal", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL = NumericField( + "fivetranMonthlyActiveRowsChangePercentageTotal", + "fivetranMonthlyActiveRowsChangePercentageTotal", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsFreePercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsFreePercentageOfAccount", + "fivetranMonthlyActiveRowsFreePercentageOfAccount", + "fivetranMonthlyActiveRowsFreePercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsPaidPercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsPaidPercentageOfAccount", + "fivetranMonthlyActiveRowsPaidPercentageOfAccount", + "fivetranMonthlyActiveRowsPaidPercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsTotalPercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsTotalPercentageOfAccount", + "fivetranMonthlyActiveRowsTotalPercentageOfAccount", + 
"fivetranMonthlyActiveRowsTotalPercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_TOTAL_TABLES_SYNCED = NumericField( - "fivetranConnectorTotalTablesSynced", "fivetranConnectorTotalTablesSynced" +FivetranConnector.FIVETRAN_TOTAL_TABLES_SYNCED = NumericField( + "fivetranTotalTablesSynced", "fivetranTotalTablesSynced" ) FivetranConnector.FIVETRAN_CONNECTOR_TOP_TABLES_BY_MAR = KeywordField( "fivetranConnectorTopTablesByMAR", "fivetranConnectorTopTablesByMAR" ) -FivetranConnector.FIVETRAN_CONNECTOR_USAGE_COST = NumericField( - "fivetranConnectorUsageCost", "fivetranConnectorUsageCost" +FivetranConnector.FIVETRAN_USAGE_COST = NumericField( + "fivetranUsageCost", "fivetranUsageCost" ) -FivetranConnector.FIVETRAN_CONNECTOR_CREDITS_USED = NumericField( - "fivetranConnectorCreditsUsed", "fivetranConnectorCreditsUsed" +FivetranConnector.FIVETRAN_CREDITS_USED = NumericField( + "fivetranCreditsUsed", "fivetranCreditsUsed" ) FivetranConnector.FIVETRAN_WORKFLOW_NAME = KeywordField( "fivetranWorkflowName", "fivetranWorkflowName" diff --git a/pyatlan_v9/model/assets/fivetran_related.py b/pyatlan_v9/model/assets/fivetran_related.py index 3ee933554..16f104819 100644 --- a/pyatlan_v9/model/assets/fivetran_related.py +++ b/pyatlan_v9/model/assets/fivetran_related.py @@ -59,139 +59,125 @@ class RelatedFivetranConnector(RelatedFivetran): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "FivetranConnector" so it serializes correctly - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, 
UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + 
fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. 
Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. 
Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = 
UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -199,10 +185,10 @@ class RelatedFivetranConnector(RelatedFivetran): ) """Total five tables sorted by MAR synced by this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/flow.py b/pyatlan_v9/model/assets/flow.py index cc923756d..e86bb1413 100644 --- a/pyatlan_v9/model/assets/flow.py +++ b/pyatlan_v9/model/assets/flow.py @@ -38,7 +38,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlow 
from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -90,6 +89,8 @@ class Flow(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Flow" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -195,66 +196,6 @@ class Flow(Asset): def __post_init__(self) -> None: self.type_name = "Flow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Flow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Flow validation failed: {errors}") - - def minimize(self) -> "Flow": - """ - Return a minimal copy of this Flow with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Flow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Flow instance with only the minimum required fields. - """ - self.validate() - return Flow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlow": - """ - Create a :class:`RelatedFlow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlow(guid=self.guid) - return RelatedFlow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -527,9 +468,6 @@ def _flow_to_nested(flow: Flow) -> FlowNested: is_incomplete=flow.is_incomplete, provenance_type=flow.provenance_type, home_id=flow.home_id, - depth=flow.depth, - immediate_upstream=flow.immediate_upstream, - immediate_downstream=flow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -559,6 +497,7 @@ def _flow_from_nested(nested: FlowNested) -> Flow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -567,9 +506,6 @@ def _flow_from_nested(nested: FlowNested) -> Flow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_flow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_control_operation.py b/pyatlan_v9/model/assets/flow_control_operation.py index 503529348..197c7c940 100644 --- a/pyatlan_v9/model/assets/flow_control_operation.py +++ b/pyatlan_v9/model/assets/flow_control_operation.py @@ -97,6 +97,8 @@ class FlowControlOperation(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowControlOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -225,70 +227,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowControlOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"FlowControlOperation validation failed: {errors}") - - def minimize(self) -> "FlowControlOperation": - """ - Return a minimal copy of this FlowControlOperation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowControlOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowControlOperation instance with only the minimum required fields. - """ - self.validate() - return FlowControlOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowControlOperation": - """ - Create a :class:`RelatedFlowControlOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowControlOperation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowControlOperation(guid=self.guid) - return RelatedFlowControlOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -599,9 +537,6 @@ def _flow_control_operation_to_nested( is_incomplete=flow_control_operation.is_incomplete, provenance_type=flow_control_operation.provenance_type, home_id=flow_control_operation.home_id, - depth=flow_control_operation.depth, - immediate_upstream=flow_control_operation.immediate_upstream, - immediate_downstream=flow_control_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -637,6 +572,7 @@ def _flow_control_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -645,9 +581,6 @@ def _flow_control_operation_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_control_operation_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_dataset.py b/pyatlan_v9/model/assets/flow_dataset.py index b1ec4d379..6f522eed0 100644 --- a/pyatlan_v9/model/assets/flow_dataset.py +++ b/pyatlan_v9/model/assets/flow_dataset.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowDataset, RelatedFlowField, 
RelatedFlowReusableUnit +from .flow_related import RelatedFlowField, RelatedFlowReusableUnit from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -113,6 +113,8 @@ class FlowDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowDataset" + flow_field_count: Union[int, None, UnsetType] = UNSET """Count of the number of individual fields that make up this ephemeral dataset.""" @@ -277,78 +279,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_detailed_by is UNSET: - errors.append("flow_detailed_by is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowDataset validation failed: {errors}") - - def minimize(self) -> "FlowDataset": - """ - Return a minimal copy of this FlowDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowDataset instance with only the minimum required fields. - """ - self.validate() - return FlowDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowDataset": - """ - Create a :class:`RelatedFlowDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowDataset(guid=self.guid) - return RelatedFlowDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -701,9 +631,6 @@ def _flow_dataset_to_nested(flow_dataset: FlowDataset) -> FlowDatasetNested: is_incomplete=flow_dataset.is_incomplete, provenance_type=flow_dataset.provenance_type, home_id=flow_dataset.home_id, - depth=flow_dataset.depth, - immediate_upstream=flow_dataset.immediate_upstream, - immediate_downstream=flow_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -735,6 +662,7 @@ def _flow_dataset_from_nested(nested: FlowDatasetNested) -> FlowDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -743,9 +671,6 @@ def _flow_dataset_from_nested(nested: FlowDatasetNested) -> FlowDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_dataset_operation.py b/pyatlan_v9/model/assets/flow_dataset_operation.py index 189ca6e40..057b98784 100644 --- a/pyatlan_v9/model/assets/flow_dataset_operation.py +++ b/pyatlan_v9/model/assets/flow_dataset_operation.py @@ -45,11 +45,7 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .fabric_related import RelatedFabricActivity from .fivetran_related 
import RelatedFivetranConnector -from .flow_related import ( - RelatedFlowControlOperation, - RelatedFlowDatasetOperation, - RelatedFlowReusableUnit, -) +from .flow_related import RelatedFlowControlOperation, RelatedFlowReusableUnit from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -126,6 +122,8 @@ class FlowDatasetOperation(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowDatasetOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -299,78 +297,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowDatasetOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_reusable_unit is UNSET: - errors.append("flow_reusable_unit is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowDatasetOperation validation failed: {errors}") - - def minimize(self) -> "FlowDatasetOperation": - """ - Return a minimal copy of this FlowDatasetOperation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowDatasetOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowDatasetOperation instance with only the minimum required fields. - """ - self.validate() - return FlowDatasetOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowDatasetOperation": - """ - Create a :class:`RelatedFlowDatasetOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowDatasetOperation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowDatasetOperation(guid=self.guid) - return RelatedFlowDatasetOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -751,9 +677,6 @@ def _flow_dataset_operation_to_nested( is_incomplete=flow_dataset_operation.is_incomplete, provenance_type=flow_dataset_operation.provenance_type, home_id=flow_dataset_operation.home_id, - depth=flow_dataset_operation.depth, - immediate_upstream=flow_dataset_operation.immediate_upstream, - immediate_downstream=flow_dataset_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -789,6 +712,7 @@ def _flow_dataset_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -797,9 +721,6 @@ def _flow_dataset_operation_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_dataset_operation_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_field.py b/pyatlan_v9/model/assets/flow_field.py index 87c1f8ce0..f79d7c7ad 100644 --- a/pyatlan_v9/model/assets/flow_field.py +++ b/pyatlan_v9/model/assets/flow_field.py @@ -40,7 +40,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowDataset, RelatedFlowField +from .flow_related 
import RelatedFlowDataset from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -111,6 +111,8 @@ class FlowField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowField" + flow_dataset_name: Union[str, None, UnsetType] = UNSET """Simple name of the ephemeral dataset in which this field is contained.""" @@ -271,82 +273,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_dataset is UNSET: - errors.append("flow_dataset is required for creation") - if self.flow_dataset_name is UNSET: - errors.append("flow_dataset_name is required for creation") - if self.flow_dataset_qualified_name is UNSET: - errors.append("flow_dataset_qualified_name is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowField validation failed: {errors}") - - def minimize(self) -> "FlowField": - """ - Return a minimal copy of this FlowField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowField instance with only the minimum required fields. - """ - self.validate() - return FlowField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowField": - """ - Create a :class:`RelatedFlowField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlowField(guid=self.guid) - return RelatedFlowField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -689,9 +615,6 @@ def _flow_field_to_nested(flow_field: FlowField) -> FlowFieldNested: is_incomplete=flow_field.is_incomplete, provenance_type=flow_field.provenance_type, home_id=flow_field.home_id, - depth=flow_field.depth, - immediate_upstream=flow_field.immediate_upstream, - immediate_downstream=flow_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -723,6 +646,7 @@ def _flow_field_from_nested(nested: FlowFieldNested) -> FlowField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -731,9 +655,6 @@ def _flow_field_from_nested(nested: FlowFieldNested) -> FlowField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_field_operation.py b/pyatlan_v9/model/assets/flow_field_operation.py index a543c90a3..a8ef02fb0 100644 --- a/pyatlan_v9/model/assets/flow_field_operation.py +++ b/pyatlan_v9/model/assets/flow_field_operation.py @@ -44,7 +44,7 @@ from .data_quality_related import RelatedDataQualityRule, 
RelatedMetric from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector -from .flow_related import RelatedFlowControlOperation, RelatedFlowFieldOperation +from .flow_related import RelatedFlowControlOperation from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -121,6 +121,8 @@ class FlowFieldOperation(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowFieldOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -288,66 +290,6 @@ class FlowFieldOperation(Asset): def __post_init__(self) -> None: self.type_name = "FlowFieldOperation" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowFieldOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowFieldOperation validation failed: {errors}") - - def minimize(self) -> "FlowFieldOperation": - """ - Return a minimal copy of this FlowFieldOperation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowFieldOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowFieldOperation instance with only the minimum required fields. - """ - self.validate() - return FlowFieldOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowFieldOperation": - """ - Create a :class:`RelatedFlowFieldOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowFieldOperation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowFieldOperation(guid=self.guid) - return RelatedFlowFieldOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -726,9 +668,6 @@ def _flow_field_operation_to_nested( is_incomplete=flow_field_operation.is_incomplete, provenance_type=flow_field_operation.provenance_type, home_id=flow_field_operation.home_id, - depth=flow_field_operation.depth, - immediate_upstream=flow_field_operation.immediate_upstream, - immediate_downstream=flow_field_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -764,6 +703,7 @@ def _flow_field_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -772,9 +712,6 @@ def _flow_field_operation_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_field_operation_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_folder.py b/pyatlan_v9/model/assets/flow_folder.py index 86c5f7ea7..cddddf733 100644 --- a/pyatlan_v9/model/assets/flow_folder.py +++ b/pyatlan_v9/model/assets/flow_folder.py @@ -93,6 +93,8 @@ class FlowFolder(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowFolder" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at 
which this point in the data processing or orchestration started.""" @@ -210,70 +212,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"FlowFolder validation failed: {errors}") - - def minimize(self) -> "FlowFolder": - """ - Return a minimal copy of this FlowFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowFolder instance with only the minimum required fields. 
- """ - self.validate() - return FlowFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowFolder": - """ - Create a :class:`RelatedFlowFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlowFolder(guid=self.guid) - return RelatedFlowFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -558,9 +496,6 @@ def _flow_folder_to_nested(flow_folder: FlowFolder) -> FlowFolderNested: is_incomplete=flow_folder.is_incomplete, provenance_type=flow_folder.provenance_type, home_id=flow_folder.home_id, - depth=flow_folder.depth, - immediate_upstream=flow_folder.immediate_upstream, - immediate_downstream=flow_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +527,7 @@ def _flow_folder_from_nested(nested: FlowFolderNested) -> FlowFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +536,6 @@ def _flow_folder_from_nested(nested: FlowFolderNested) -> FlowFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_folder_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/flow_project.py b/pyatlan_v9/model/assets/flow_project.py index b37dff1f0..4340bb77c 100644 --- a/pyatlan_v9/model/assets/flow_project.py +++ b/pyatlan_v9/model/assets/flow_project.py @@ -38,7 +38,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowProject from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -90,6 +89,8 @@ class FlowProject(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowProject" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -195,66 +196,6 @@ class FlowProject(Asset): def __post_init__(self) -> None: self.type_name = "FlowProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowProject validation failed: {errors}") - - def minimize(self) -> "FlowProject": - """ - Return a minimal copy of this FlowProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowProject instance with only the minimum required fields. - """ - self.validate() - return FlowProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowProject": - """ - Create a :class:`RelatedFlowProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowProject(guid=self.guid) - return RelatedFlowProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -533,9 +474,6 @@ def _flow_project_to_nested(flow_project: FlowProject) -> FlowProjectNested: is_incomplete=flow_project.is_incomplete, provenance_type=flow_project.provenance_type, home_id=flow_project.home_id, - depth=flow_project.depth, - immediate_upstream=flow_project.immediate_upstream, - immediate_downstream=flow_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -567,6 +505,7 @@ def _flow_project_from_nested(nested: FlowProjectNested) -> FlowProject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -575,9 +514,6 @@ def _flow_project_from_nested(nested: FlowProjectNested) -> FlowProject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/flow_reusable_unit.py b/pyatlan_v9/model/assets/flow_reusable_unit.py index 6c725af8b..8a87a4f68 100644 --- a/pyatlan_v9/model/assets/flow_reusable_unit.py +++ b/pyatlan_v9/model/assets/flow_reusable_unit.py @@ -38,11 +38,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import ( - 
RelatedFlowDataset, - RelatedFlowDatasetOperation, - RelatedFlowReusableUnit, -) +from .flow_related import RelatedFlowDataset, RelatedFlowDatasetOperation from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -99,6 +95,8 @@ class FlowReusableUnit(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowReusableUnit" + flow_dataset_count: Union[int, None, UnsetType] = UNSET """Count of the number of ephemeral datasets contained within this reusable unit.""" @@ -219,66 +217,6 @@ class FlowReusableUnit(Asset): def __post_init__(self) -> None: self.type_name = "FlowReusableUnit" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowReusableUnit instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowReusableUnit validation failed: {errors}") - - def minimize(self) -> "FlowReusableUnit": - """ - Return a minimal copy of this FlowReusableUnit with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowReusableUnit with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowReusableUnit instance with only the minimum required fields. - """ - self.validate() - return FlowReusableUnit(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowReusableUnit": - """ - Create a :class:`RelatedFlowReusableUnit` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowReusableUnit reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowReusableUnit(guid=self.guid) - return RelatedFlowReusableUnit(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -587,9 +525,6 @@ def _flow_reusable_unit_to_nested( is_incomplete=flow_reusable_unit.is_incomplete, provenance_type=flow_reusable_unit.provenance_type, home_id=flow_reusable_unit.home_id, - depth=flow_reusable_unit.depth, - immediate_upstream=flow_reusable_unit.immediate_upstream, - immediate_downstream=flow_reusable_unit.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -623,6 +558,7 @@ def _flow_reusable_unit_from_nested(nested: FlowReusableUnitNested) -> FlowReusa updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -631,9 +567,6 @@ def _flow_reusable_unit_from_nested(nested: FlowReusableUnitNested) -> FlowReusa is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_reusable_unit_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/folder.py b/pyatlan_v9/model/assets/folder.py index 28e125371..50d1cb73d 100644 --- a/pyatlan_v9/model/assets/folder.py +++ b/pyatlan_v9/model/assets/folder.py @@ -84,6 +84,8 @@ class Folder(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Folder" + parent_qualified_name: Union[str, 
None, UnsetType] = UNSET """Unique name of the parent folder or collection in which this folder exists.""" @@ -168,72 +170,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Folder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent is UNSET: - errors.append("parent is required for creation") - if errors: - raise ValueError(f"Folder validation failed: {errors}") - - def minimize(self) -> "Folder": - """ - Return a minimal copy of this Folder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Folder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Folder instance with only the minimum required fields. - """ - self.validate() - return Folder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFolder": - """ - Create a :class:`RelatedFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFolder(guid=self.guid) - return RelatedFolder(qualified_name=self.qualified_name) - @classmethod def creator( cls, @@ -502,9 +438,6 @@ def _folder_to_nested(folder: Folder) -> FolderNested: is_incomplete=folder.is_incomplete, provenance_type=folder.provenance_type, home_id=folder.home_id, - depth=folder.depth, - immediate_upstream=folder.immediate_upstream, - immediate_downstream=folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -534,6 +467,7 @@ def _folder_from_nested(nested: FolderNested) -> Folder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -542,9 +476,6 @@ def _folder_from_nested(nested: FolderNested) -> Folder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_folder_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/form.py b/pyatlan_v9/model/assets/form.py index 96f6eafe2..1e50d92dc 100644 --- a/pyatlan_v9/model/assets/form.py +++ b/pyatlan_v9/model/assets/form.py @@ -38,7 +38,6 @@ ) from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .form_related import RelatedForm from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -78,6 +77,8 @@ class Form(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Form" + form_fields: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Fields in a form.""" @@ -147,66 +148,6 @@ class Form(Asset): def __post_init__(self) -> None: self.type_name = "Form" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Form instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Form validation failed: {errors}") - - def minimize(self) -> "Form": - """ - Return a minimal copy of this Form with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Form with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Form instance with only the minimum required fields. - """ - self.validate() - return Form(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedForm": - """ - Create a :class:`RelatedForm` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedForm reference to this asset. - """ - if self.guid is not UNSET: - return RelatedForm(guid=self.guid) - return RelatedForm(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -417,9 +358,6 @@ def _form_to_nested(form: Form) -> FormNested: is_incomplete=form.is_incomplete, provenance_type=form.provenance_type, home_id=form.home_id, - depth=form.depth, - immediate_upstream=form.immediate_upstream, - immediate_downstream=form.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -449,6 +387,7 @@ def _form_from_nested(nested: FormNested) -> Form: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -457,9 +396,6 @@ def _form_from_nested(nested: FormNested) -> Form: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_form_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/function.py b/pyatlan_v9/model/assets/function.py index 8db3266d3..2132c2de8 100644 --- a/pyatlan_v9/model/assets/function.py +++ b/pyatlan_v9/model/assets/function.py @@ -58,7 +58,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedFunction, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -72,19 +72,19 @@ class Function(Asset): """ FUNCTION_DEFINITION: ClassVar[Any] = None - FUNCTION_RETURN_TYPE: ClassVar[Any] = None - FUNCTION_ARGUMENTS: ClassVar[Any] = None - FUNCTION_LANGUAGE: ClassVar[Any] = None - FUNCTION_TYPE: ClassVar[Any] = None - FUNCTION_IS_EXTERNAL: ClassVar[Any] = None - FUNCTION_IS_DMF: ClassVar[Any] = None - FUNCTION_IS_SECURE: ClassVar[Any] = None - FUNCTION_IS_MEMOIZABLE: ClassVar[Any] = None - FUNCTION_RUNTIME_VERSION: ClassVar[Any] = None - FUNCTION_EXTERNAL_ACCESS_INTEGRATIONS: ClassVar[Any] = None - FUNCTION_SECRETS: ClassVar[Any] = None - FUNCTION_PACKAGES: ClassVar[Any] = None - FUNCTION_INSTALLED_PACKAGES: ClassVar[Any] = None + SQL_RETURN_TYPE: ClassVar[Any] = None + SQL_ARGUMENTS: ClassVar[Any] = None + SQL_LANGUAGE: ClassVar[Any] = None + SQL_TYPE: ClassVar[Any] = None + SQL_IS_EXTERNAL: ClassVar[Any] = None + SQL_IS_DMF: ClassVar[Any] = None + SQL_IS_SECURE: ClassVar[Any] = None + SQL_IS_MEMOIZABLE: ClassVar[Any] = None + SQL_RUNTIME_VERSION: ClassVar[Any] = None + SQL_EXTERNAL_ACCESS_INTEGRATIONS: ClassVar[Any] = None + SQL_SECRETS: ClassVar[Any] = None + SQL_PACKAGES: ClassVar[Any] = None + SQL_INSTALLED_PACKAGES: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: 
ClassVar[Any] = None @@ -102,7 +102,6 @@ class Function(Asset): IS_PROFILED: ClassVar[Any] = None LAST_PROFILED_AT: ClassVar[Any] = None SQL_AI_MODEL_CONTEXT_QUALIFIED_NAME: ClassVar[Any] = None - SQL_IS_SECURE: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -141,48 +140,50 @@ class Function(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Function" + function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" + sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether sensitive information of the function is omitted for unauthorized users (true) or not (false).""" + sql_is_secure: Union[bool, None, UnsetType] = UNSET 
+ """Whether this asset is secure (true) or not (false).""" - function_is_memoizable: Union[bool, None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" query_count: Union[int, None, UnsetType] = UNSET @@ -238,9 +239,6 @@ class Function(Asset): ) """Unique name of the context in which the model versions exist, or empty if it does not exist within an AI model context.""" - sql_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether this asset is secure (true) or not (false).""" - input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -375,80 +373,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Function instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.function_schema is UNSET: - errors.append("function_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Function validation failed: {errors}") - - def minimize(self) -> "Function": - """ - Return a minimal copy of this Function with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Function with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Function instance with only the minimum required fields. - """ - self.validate() - return Function(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFunction": - """ - Create a :class:`RelatedFunction` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFunction reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFunction(guid=self.guid) - return RelatedFunction(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -507,45 +431,45 @@ class FunctionAttributes(AssetAttributes): function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" 
+ sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether sensitive information of the function is omitted for unauthorized users (true) or not (false).""" + sql_is_secure: Union[bool, None, UnsetType] = UNSET + """Whether this asset is secure (true) or not (false).""" - function_is_memoizable: Union[bool, None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" query_count: Union[int, None, UnsetType] = UNSET @@ -601,9 +525,6 @@ class FunctionAttributes(AssetAttributes): ) """Unique name of the context in which the model versions exist, or empty if it does not exist within an AI model context.""" - sql_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether this asset is secure (true) or not (false).""" - class FunctionRelationshipAttributes(AssetRelationshipAttributes): """Function-specific 
relationship attributes for nested API format.""" @@ -795,21 +716,19 @@ def _populate_function_attrs(attrs: FunctionAttributes, obj: Function) -> None: """Populate Function-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.function_definition = obj.function_definition - attrs.function_return_type = obj.function_return_type - attrs.function_arguments = obj.function_arguments - attrs.function_language = obj.function_language - attrs.function_type = obj.function_type - attrs.function_is_external = obj.function_is_external - attrs.function_is_dmf = obj.function_is_dmf - attrs.function_is_secure = obj.function_is_secure - attrs.function_is_memoizable = obj.function_is_memoizable - attrs.function_runtime_version = obj.function_runtime_version - attrs.function_external_access_integrations = ( - obj.function_external_access_integrations - ) - attrs.function_secrets = obj.function_secrets - attrs.function_packages = obj.function_packages - attrs.function_installed_packages = obj.function_installed_packages + attrs.sql_return_type = obj.sql_return_type + attrs.sql_arguments = obj.sql_arguments + attrs.sql_language = obj.sql_language + attrs.sql_type = obj.sql_type + attrs.sql_is_external = obj.sql_is_external + attrs.sql_is_dmf = obj.sql_is_dmf + attrs.sql_is_secure = obj.sql_is_secure + attrs.sql_is_memoizable = obj.sql_is_memoizable + attrs.sql_runtime_version = obj.sql_runtime_version + attrs.sql_external_access_integrations = obj.sql_external_access_integrations + attrs.sql_secrets = obj.sql_secrets + attrs.sql_packages = obj.sql_packages + attrs.sql_installed_packages = obj.sql_installed_packages attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -827,28 +746,25 @@ def _populate_function_attrs(attrs: FunctionAttributes, obj: Function) -> None: attrs.is_profiled = obj.is_profiled attrs.last_profiled_at = obj.last_profiled_at 
attrs.sql_ai_model_context_qualified_name = obj.sql_ai_model_context_qualified_name - attrs.sql_is_secure = obj.sql_is_secure def _extract_function_attrs(attrs: FunctionAttributes) -> dict: """Extract all Function attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["function_definition"] = attrs.function_definition - result["function_return_type"] = attrs.function_return_type - result["function_arguments"] = attrs.function_arguments - result["function_language"] = attrs.function_language - result["function_type"] = attrs.function_type - result["function_is_external"] = attrs.function_is_external - result["function_is_dmf"] = attrs.function_is_dmf - result["function_is_secure"] = attrs.function_is_secure - result["function_is_memoizable"] = attrs.function_is_memoizable - result["function_runtime_version"] = attrs.function_runtime_version - result["function_external_access_integrations"] = ( - attrs.function_external_access_integrations - ) - result["function_secrets"] = attrs.function_secrets - result["function_packages"] = attrs.function_packages - result["function_installed_packages"] = attrs.function_installed_packages + result["sql_return_type"] = attrs.sql_return_type + result["sql_arguments"] = attrs.sql_arguments + result["sql_language"] = attrs.sql_language + result["sql_type"] = attrs.sql_type + result["sql_is_external"] = attrs.sql_is_external + result["sql_is_dmf"] = attrs.sql_is_dmf + result["sql_is_secure"] = attrs.sql_is_secure + result["sql_is_memoizable"] = attrs.sql_is_memoizable + result["sql_runtime_version"] = attrs.sql_runtime_version + result["sql_external_access_integrations"] = attrs.sql_external_access_integrations + result["sql_secrets"] = attrs.sql_secrets + result["sql_packages"] = attrs.sql_packages + result["sql_installed_packages"] = attrs.sql_installed_packages result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = 
attrs.query_user_map @@ -868,7 +784,6 @@ def _extract_function_attrs(attrs: FunctionAttributes) -> dict: result["sql_ai_model_context_qualified_name"] = ( attrs.sql_ai_model_context_qualified_name ) - result["sql_is_secure"] = attrs.sql_is_secure return result @@ -905,9 +820,6 @@ def _function_to_nested(function: Function) -> FunctionNested: is_incomplete=function.is_incomplete, provenance_type=function.provenance_type, home_id=function.home_id, - depth=function.depth, - immediate_upstream=function.immediate_upstream, - immediate_downstream=function.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -939,6 +851,7 @@ def _function_from_nested(nested: FunctionNested) -> Function: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -947,9 +860,6 @@ def _function_from_nested(nested: FunctionNested) -> Function: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_function_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -979,26 +889,24 @@ def _function_from_nested_bytes(data: bytes, serde: Serde) -> Function: ) Function.FUNCTION_DEFINITION = KeywordField("functionDefinition", "functionDefinition") -Function.FUNCTION_RETURN_TYPE = KeywordField("functionReturnType", "functionReturnType") -Function.FUNCTION_ARGUMENTS = KeywordField("functionArguments", "functionArguments") -Function.FUNCTION_LANGUAGE = KeywordField("functionLanguage", "functionLanguage") -Function.FUNCTION_TYPE = KeywordField("functionType", "functionType") -Function.FUNCTION_IS_EXTERNAL = BooleanField("functionIsExternal", 
"functionIsExternal") -Function.FUNCTION_IS_DMF = BooleanField("functionIsDMF", "functionIsDMF") -Function.FUNCTION_IS_SECURE = BooleanField("functionIsSecure", "functionIsSecure") -Function.FUNCTION_IS_MEMOIZABLE = BooleanField( - "functionIsMemoizable", "functionIsMemoizable" -) -Function.FUNCTION_RUNTIME_VERSION = KeywordTextField( - "functionRuntimeVersion", "functionRuntimeVersion", "functionRuntimeVersion.text" +Function.SQL_RETURN_TYPE = KeywordField("sqlReturnType", "sqlReturnType") +Function.SQL_ARGUMENTS = KeywordField("sqlArguments", "sqlArguments") +Function.SQL_LANGUAGE = KeywordField("sqlLanguage", "sqlLanguage") +Function.SQL_TYPE = KeywordField("sqlType", "sqlType") +Function.SQL_IS_EXTERNAL = BooleanField("sqlIsExternal", "sqlIsExternal") +Function.SQL_IS_DMF = BooleanField("sqlIsDMF", "sqlIsDMF") +Function.SQL_IS_SECURE = BooleanField("sqlIsSecure", "sqlIsSecure") +Function.SQL_IS_MEMOIZABLE = BooleanField("sqlIsMemoizable", "sqlIsMemoizable") +Function.SQL_RUNTIME_VERSION = KeywordTextField( + "sqlRuntimeVersion", "sqlRuntimeVersion", "sqlRuntimeVersion.text" ) -Function.FUNCTION_EXTERNAL_ACCESS_INTEGRATIONS = KeywordField( - "functionExternalAccessIntegrations", "functionExternalAccessIntegrations" +Function.SQL_EXTERNAL_ACCESS_INTEGRATIONS = KeywordField( + "sqlExternalAccessIntegrations", "sqlExternalAccessIntegrations" ) -Function.FUNCTION_SECRETS = KeywordField("functionSecrets", "functionSecrets") -Function.FUNCTION_PACKAGES = KeywordField("functionPackages", "functionPackages") -Function.FUNCTION_INSTALLED_PACKAGES = KeywordField( - "functionInstalledPackages", "functionInstalledPackages" +Function.SQL_SECRETS = KeywordField("sqlSecrets", "sqlSecrets") +Function.SQL_PACKAGES = KeywordField("sqlPackages", "sqlPackages") +Function.SQL_INSTALLED_PACKAGES = KeywordField( + "sqlInstalledPackages", "sqlInstalledPackages" ) Function.QUERY_COUNT = NumericField("queryCount", "queryCount") Function.QUERY_USER_COUNT = NumericField("queryUserCount", 
"queryUserCount") @@ -1029,7 +937,6 @@ def _function_from_nested_bytes(data: bytes, serde: Serde) -> Function: Function.SQL_AI_MODEL_CONTEXT_QUALIFIED_NAME = KeywordField( "sqlAIModelContextQualifiedName", "sqlAIModelContextQualifiedName" ) -Function.SQL_IS_SECURE = BooleanField("sqlIsSecure", "sqlIsSecure") Function.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") Function.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") Function.ANOMALO_CHECKS = RelationField("anomaloChecks") diff --git a/pyatlan_v9/model/assets/gcs.py b/pyatlan_v9/model/assets/gcs.py index e2308a1bd..3712f0c6e 100644 --- a/pyatlan_v9/model/assets/gcs.py +++ b/pyatlan_v9/model/assets/gcs.py @@ -40,7 +40,6 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCS from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -107,6 +106,8 @@ class GCS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCS" + gcs_storage_class: Union[str, None, UnsetType] = UNSET """Storage class of this asset.""" @@ -247,66 +248,6 @@ class GCS(Asset): def __post_init__(self) -> None: self.type_name = "GCS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"GCS validation failed: {errors}") - - def minimize(self) -> "GCS": - """ - Return a minimal copy of this GCS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new GCS instance with only the minimum required fields. - """ - self.validate() - return GCS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCS": - """ - Create a :class:`RelatedGCS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGCS(guid=self.guid) - return RelatedGCS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -624,9 +565,6 @@ def _gcs_to_nested(gcs: GCS) -> GCSNested: is_incomplete=gcs.is_incomplete, provenance_type=gcs.provenance_type, home_id=gcs.home_id, - depth=gcs.depth, - immediate_upstream=gcs.immediate_upstream, - immediate_downstream=gcs.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +594,7 @@ def _gcs_from_nested(nested: GCSNested) -> GCS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +603,6 @@ def _gcs_from_nested(nested: GCSNested) -> GCS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_gcs_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/gcs_bucket.py b/pyatlan_v9/model/assets/gcs_bucket.py index 9a2dfb5fe..8870b320a 100644 --- a/pyatlan_v9/model/assets/gcs_bucket.py +++ b/pyatlan_v9/model/assets/gcs_bucket.py @@ -41,7 +41,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCSBucket, RelatedGCSObject +from .gcs_related import RelatedGCSObject from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -116,6 +116,8 @@ class GCSBucket(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCSBucket" + gcs_object_count: Union[int, None, UnsetType] = UNSET """Number of objects within the bucket.""" @@ -280,66 +282,6 @@ class GCSBucket(Asset): def __post_init__(self) -> None: self.type_name = "GCSBucket" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCSBucket instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"GCSBucket validation failed: {errors}") - - def minimize(self) -> "GCSBucket": - """ - Return a minimal copy of this GCSBucket with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCSBucket with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new GCSBucket instance with only the minimum required fields. - """ - self.validate() - return GCSBucket(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCSBucket": - """ - Create a :class:`RelatedGCSBucket` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCSBucket reference to this asset. - """ - if self.guid is not UNSET: - return RelatedGCSBucket(guid=self.guid) - return RelatedGCSBucket(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "GCSBucket": @@ -757,9 +699,6 @@ def _gcs_bucket_to_nested(gcs_bucket: GCSBucket) -> GCSBucketNested: is_incomplete=gcs_bucket.is_incomplete, provenance_type=gcs_bucket.provenance_type, home_id=gcs_bucket.home_id, - depth=gcs_bucket.depth, - immediate_upstream=gcs_bucket.immediate_upstream, - immediate_downstream=gcs_bucket.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -791,6 +730,7 @@ def _gcs_bucket_from_nested(nested: GCSBucketNested) -> GCSBucket: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -799,9 +739,6 @@ def _gcs_bucket_from_nested(nested: GCSBucketNested) -> GCSBucket: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_gcs_bucket_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/gcs_object.py 
b/pyatlan_v9/model/assets/gcs_object.py index 598eecc72..e33e21d90 100644 --- a/pyatlan_v9/model/assets/gcs_object.py +++ b/pyatlan_v9/model/assets/gcs_object.py @@ -43,7 +43,7 @@ ) from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCSBucket, RelatedGCSObject +from .gcs_related import RelatedGCSBucket from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -126,6 +126,8 @@ class GCSObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCSObject" + gcs_bucket_name: Union[str, None, UnsetType] = UNSET """Simple name of the bucket in which this object exists.""" @@ -324,76 +326,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCSObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.gcs_bucket is UNSET: - errors.append("gcs_bucket is required for creation") - if self.gcs_bucket_name is UNSET: - errors.append("gcs_bucket_name is required for creation") - if self.gcs_bucket_qualified_name is UNSET: - errors.append("gcs_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"GCSObject validation failed: {errors}") - - def minimize(self) -> "GCSObject": - """ - Return a minimal copy of this GCSObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCSObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new GCSObject instance with only the minimum required fields. - """ - self.validate() - return GCSObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCSObject": - """ - Create a :class:`RelatedGCSObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCSObject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGCSObject(guid=self.guid) - return RelatedGCSObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -940,9 +872,6 @@ def _gcs_object_to_nested(gcs_object: GCSObject) -> GCSObjectNested: is_incomplete=gcs_object.is_incomplete, provenance_type=gcs_object.provenance_type, home_id=gcs_object.home_id, - depth=gcs_object.depth, - immediate_upstream=gcs_object.immediate_upstream, - immediate_downstream=gcs_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -974,6 +903,7 @@ def _gcs_object_from_nested(nested: GCSObjectNested) -> GCSObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -982,9 +912,6 @@ def _gcs_object_from_nested(nested: GCSObjectNested) -> GCSObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_gcs_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/google.py b/pyatlan_v9/model/assets/google.py index 1beba0980..a2e3480fa 100644 --- a/pyatlan_v9/model/assets/google.py +++ b/pyatlan_v9/model/assets/google.py @@ -36,7 +36,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedGoogle from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -60,7 +59,7 @@ class Google(Asset): GOOGLE_SERVICE: ClassVar[Any] = None GOOGLE_PROJECT_NAME: ClassVar[Any] = None GOOGLE_PROJECT_ID: ClassVar[Any] = None - 
GOOGLE_PROJECT_NUMBER: ClassVar[Any] = None + CLOUD_PROJECT_NUMBER: ClassVar[Any] = None GOOGLE_LOCATION: ClassVar[Any] = None GOOGLE_LOCATION_TYPE: ClassVar[Any] = None GOOGLE_LABELS: ClassVar[Any] = None @@ -85,6 +84,8 @@ class Google(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Google" + google_service: Union[str, None, UnsetType] = UNSET """Service in Google in which the asset exists.""" @@ -94,7 +95,7 @@ class Google(Asset): google_project_id: Union[str, None, UnsetType] = UNSET """ID of the project in which the asset exists.""" - google_project_number: Union[int, None, UnsetType] = UNSET + cloud_project_number: Union[int, None, UnsetType] = UNSET """Number of the project in which the asset exists.""" google_location: Union[str, None, UnsetType] = UNSET @@ -175,66 +176,6 @@ class Google(Asset): def __post_init__(self) -> None: self.type_name = "Google" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Google instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Google validation failed: {errors}") - - def minimize(self) -> "Google": - """ - Return a minimal copy of this Google with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Google with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Google instance with only the minimum required fields. - """ - self.validate() - return Google(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGoogle": - """ - Create a :class:`RelatedGoogle` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGoogle reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGoogle(guid=self.guid) - return RelatedGoogle(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -299,7 +240,7 @@ class GoogleAttributes(AssetAttributes): google_project_id: Union[str, None, UnsetType] = UNSET """ID of the project in which the asset exists.""" - google_project_number: Union[int, None, UnsetType] = UNSET + cloud_project_number: Union[int, None, UnsetType] = UNSET """Number of the project in which the asset exists.""" google_location: Union[str, None, UnsetType] = UNSET @@ -428,7 +369,7 @@ def _populate_google_attrs(attrs: GoogleAttributes, obj: Google) -> None: attrs.google_service = obj.google_service attrs.google_project_name = obj.google_project_name attrs.google_project_id = obj.google_project_id - attrs.google_project_number = obj.google_project_number + attrs.cloud_project_number = obj.cloud_project_number attrs.google_location = obj.google_location attrs.google_location_type = obj.google_location_type attrs.google_labels = obj.google_labels @@ -442,7 +383,7 @@ def _extract_google_attrs(attrs: GoogleAttributes) -> dict: result["google_service"] = attrs.google_service result["google_project_name"] = attrs.google_project_name result["google_project_id"] = attrs.google_project_id - result["google_project_number"] = attrs.google_project_number + result["cloud_project_number"] = attrs.cloud_project_number result["google_location"] = attrs.google_location result["google_location_type"] = attrs.google_location_type result["google_labels"] = attrs.google_labels @@ -484,9 +425,6 @@ def _google_to_nested(google: Google) -> GoogleNested: is_incomplete=google.is_incomplete, provenance_type=google.provenance_type, home_id=google.home_id, - depth=google.depth, - immediate_upstream=google.immediate_upstream, 
- immediate_downstream=google.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -516,6 +454,7 @@ def _google_from_nested(nested: GoogleNested) -> Google: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -524,9 +463,6 @@ def _google_from_nested(nested: GoogleNested) -> Google: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_google_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -561,9 +497,7 @@ def _google_from_nested_bytes(data: bytes, serde: Serde) -> Google: Google.GOOGLE_PROJECT_ID = KeywordTextField( "googleProjectId", "googleProjectId", "googleProjectId.text" ) -Google.GOOGLE_PROJECT_NUMBER = NumericField( - "googleProjectNumber", "googleProjectNumber" -) +Google.CLOUD_PROJECT_NUMBER = NumericField("cloudProjectNumber", "cloudProjectNumber") Google.GOOGLE_LOCATION = KeywordField("googleLocation", "googleLocation") Google.GOOGLE_LOCATION_TYPE = KeywordField("googleLocationType", "googleLocationType") Google.GOOGLE_LABELS = KeywordField("googleLabels", "googleLabels") diff --git a/pyatlan_v9/model/assets/iceberg.py b/pyatlan_v9/model/assets/iceberg.py index 84b86c9ff..6d0723af8 100644 --- a/pyatlan_v9/model/assets/iceberg.py +++ b/pyatlan_v9/model/assets/iceberg.py @@ -47,7 +47,6 @@ RelatedDbtTest, ) from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIceberg from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, 
RelatedPartialObject @@ -126,6 +125,8 @@ class Iceberg(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Iceberg" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ -308,66 +309,6 @@ class Iceberg(Asset): def __post_init__(self) -> None: self.type_name = "Iceberg" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Iceberg instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Iceberg validation failed: {errors}") - - def minimize(self) -> "Iceberg": - """ - Return a minimal copy of this Iceberg with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Iceberg with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Iceberg instance with only the minimum required fields. - """ - self.validate() - return Iceberg(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIceberg": - """ - Create a :class:`RelatedIceberg` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIceberg reference to this asset. - """ - if self.guid is not UNSET: - return RelatedIceberg(guid=self.guid) - return RelatedIceberg(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -754,9 +695,6 @@ def _iceberg_to_nested(iceberg: Iceberg) -> IcebergNested: is_incomplete=iceberg.is_incomplete, provenance_type=iceberg.provenance_type, home_id=iceberg.home_id, - depth=iceberg.depth, - immediate_upstream=iceberg.immediate_upstream, - immediate_downstream=iceberg.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -786,6 +724,7 @@ def _iceberg_from_nested(nested: IcebergNested) -> Iceberg: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -794,9 +733,6 @@ def _iceberg_from_nested(nested: IcebergNested) -> Iceberg: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/iceberg_catalog.py b/pyatlan_v9/model/assets/iceberg_catalog.py index e6d6bfa69..56cb9c56d 100644 --- a/pyatlan_v9/model/assets/iceberg_catalog.py +++ b/pyatlan_v9/model/assets/iceberg_catalog.py @@ -48,7 +48,6 @@ ) from .fabric_related import RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergCatalog from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -136,6 +135,8 @@ class IcebergCatalog(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergCatalog" + iceberg_catalog_type: Union[str, None, UnsetType] = UNSET """Type of the Iceberg catalog (e.g., 'hadoop', 'hive', 'nessie', 'rest').""" @@ -342,66 +343,6 @@ class IcebergCatalog(Asset): def __post_init__(self) -> None: self.type_name = "IcebergCatalog" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergCatalog instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"IcebergCatalog validation failed: {errors}") - - def minimize(self) -> "IcebergCatalog": - """ - Return a minimal copy of this IcebergCatalog with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergCatalog with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergCatalog instance with only the minimum required fields. - """ - self.validate() - return IcebergCatalog(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergCatalog": - """ - Create a :class:`RelatedIcebergCatalog` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergCatalog reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergCatalog(guid=self.guid) - return RelatedIcebergCatalog(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -832,9 +773,6 @@ def _iceberg_catalog_to_nested(iceberg_catalog: IcebergCatalog) -> IcebergCatalo is_incomplete=iceberg_catalog.is_incomplete, provenance_type=iceberg_catalog.provenance_type, home_id=iceberg_catalog.home_id, - depth=iceberg_catalog.depth, - immediate_upstream=iceberg_catalog.immediate_upstream, - immediate_downstream=iceberg_catalog.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -868,6 +806,7 @@ def _iceberg_catalog_from_nested(nested: IcebergCatalogNested) -> IcebergCatalog updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -876,9 +815,6 @@ def _iceberg_catalog_from_nested(nested: IcebergCatalogNested) -> IcebergCatalog is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_catalog_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/iceberg_column.py b/pyatlan_v9/model/assets/iceberg_column.py index ee667d782..e252922f7 100644 --- a/pyatlan_v9/model/assets/iceberg_column.py +++ b/pyatlan_v9/model/assets/iceberg_column.py @@ -50,7 +50,6 @@ RelatedDbtTest, ) from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergColumn from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -230,6 +229,8 @@ class IcebergColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergColumn" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ -686,69 +687,6 @@ class IcebergColumn(Asset): def __post_init__(self) -> None: self.type_name = "IcebergColumn" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"IcebergColumn validation failed: {errors}") - - def minimize(self) -> "IcebergColumn": - """ - Return a minimal copy of this IcebergColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergColumn instance with only the minimum required fields. - """ - self.validate() - return IcebergColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergColumn": - """ - Create a :class:`RelatedIcebergColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergColumn(guid=self.guid) - return RelatedIcebergColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1573,9 +1511,6 @@ def _iceberg_column_to_nested(iceberg_column: IcebergColumn) -> IcebergColumnNes is_incomplete=iceberg_column.is_incomplete, provenance_type=iceberg_column.provenance_type, home_id=iceberg_column.home_id, - depth=iceberg_column.depth, - immediate_upstream=iceberg_column.immediate_upstream, - immediate_downstream=iceberg_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1609,6 +1544,7 @@ def _iceberg_column_from_nested(nested: IcebergColumnNested) -> IcebergColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1617,9 +1553,6 @@ def _iceberg_column_from_nested(nested: IcebergColumnNested) -> IcebergColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/iceberg_namespace.py b/pyatlan_v9/model/assets/iceberg_namespace.py index 541d4469c..bd7027334 100644 --- a/pyatlan_v9/model/assets/iceberg_namespace.py +++ b/pyatlan_v9/model/assets/iceberg_namespace.py @@ -168,6 +168,8 @@ class IcebergNamespace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "IcebergNamespace" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ -432,70 +434,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergNamespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"IcebergNamespace validation failed: {errors}") - - def minimize(self) -> "IcebergNamespace": - """ - Return a minimal copy of this IcebergNamespace with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergNamespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergNamespace instance with only the minimum required fields. - """ - self.validate() - return IcebergNamespace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergNamespace": - """ - Create a :class:`RelatedIcebergNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergNamespace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedIcebergNamespace(guid=self.guid) - return RelatedIcebergNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -994,9 +932,6 @@ def _iceberg_namespace_to_nested( is_incomplete=iceberg_namespace.is_incomplete, provenance_type=iceberg_namespace.provenance_type, home_id=iceberg_namespace.home_id, - depth=iceberg_namespace.depth, - immediate_upstream=iceberg_namespace.immediate_upstream, - immediate_downstream=iceberg_namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1030,6 +965,7 @@ def _iceberg_namespace_from_nested(nested: IcebergNamespaceNested) -> IcebergNam updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1038,9 +974,6 @@ def _iceberg_namespace_from_nested(nested: 
IcebergNamespaceNested) -> IcebergNam is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/iceberg_table.py b/pyatlan_v9/model/assets/iceberg_table.py index 928bda163..03c7e085c 100644 --- a/pyatlan_v9/model/assets/iceberg_table.py +++ b/pyatlan_v9/model/assets/iceberg_table.py @@ -47,7 +47,6 @@ RelatedDbtTest, ) from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergTable from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -171,6 +170,8 @@ class IcebergTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergTable" + iceberg_current_snapshot_id: Union[int, None, UnsetType] = UNSET """Current snapshot identifier for this Iceberg table.""" @@ -467,66 +468,6 @@ class IcebergTable(Asset): def __post_init__(self) -> None: self.type_name = "IcebergTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"IcebergTable validation failed: {errors}") - - def minimize(self) -> "IcebergTable": - """ - Return a minimal copy of this IcebergTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergTable instance with only the minimum required fields. - """ - self.validate() - return IcebergTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergTable": - """ - Create a :class:`RelatedIcebergTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergTable(guid=self.guid) - return RelatedIcebergTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1101,9 +1042,6 @@ def _iceberg_table_to_nested(iceberg_table: IcebergTable) -> IcebergTableNested: is_incomplete=iceberg_table.is_incomplete, provenance_type=iceberg_table.provenance_type, home_id=iceberg_table.home_id, - depth=iceberg_table.depth, - immediate_upstream=iceberg_table.immediate_upstream, - immediate_downstream=iceberg_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1137,6 +1075,7 @@ def _iceberg_table_from_nested(nested: IcebergTableNested) -> IcebergTable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1145,9 +1084,6 @@ def _iceberg_table_from_nested(nested: IcebergTableNested) -> IcebergTable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/incident.py b/pyatlan_v9/model/assets/incident.py index b47e692c2..a98923127 100644 --- a/pyatlan_v9/model/assets/incident.py +++ b/pyatlan_v9/model/assets/incident.py @@ -27,7 +27,6 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedIncident from 
.data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -56,7 +55,7 @@ class Incident(Referenceable): Base class for Incident assets. """ - INCIDENT_SEVERITY: ClassVar[Any] = None + ASSET_SEVERITY: ClassVar[Any] = None NAME: ClassVar[Any] = None DISPLAY_NAME: ClassVar[Any] = None DESCRIPTION: ClassVar[Any] = None @@ -75,6 +74,7 @@ class Incident(Referenceable): ANNOUNCEMENT_TYPE: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_AT: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_BY: ClassVar[Any] = None + ASSET_ANNOUNCEMENT_EXPIRED_AT: ClassVar[Any] = None OWNER_USERS: ClassVar[Any] = None OWNER_GROUPS: ClassVar[Any] = None ADMIN_USERS: ClassVar[Any] = None @@ -264,7 +264,9 @@ class Incident(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None - incident_severity: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "Incident" + + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" name: Union[str, None, UnsetType] = UNSET @@ -321,6 +323,9 @@ class Incident(Referenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -994,66 +999,6 @@ class Incident(Referenceable): def __post_init__(self) -> None: self.type_name = "Incident" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Incident instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Incident validation failed: {errors}") - - def minimize(self) -> "Incident": - """ - Return a minimal copy of this Incident with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Incident with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Incident instance with only the minimum required fields. 
- """ - self.validate() - return Incident(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIncident": - """ - Create a :class:`RelatedIncident` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIncident reference to this asset. - """ - if self.guid is not UNSET: - return RelatedIncident(guid=self.guid) - return RelatedIncident(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1109,7 +1054,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> Incident: class IncidentAttributes(ReferenceableAttributes): """Incident-specific attributes for nested API format.""" - incident_severity: Union[str, None, UnsetType] = UNSET + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" name: Union[str, None, UnsetType] = UNSET @@ -1166,6 +1111,9 @@ class IncidentAttributes(ReferenceableAttributes): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -1884,7 +1832,7 @@ class IncidentNested(ReferenceableNested): def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: """Populate Incident-specific attributes on the attrs struct.""" _populate_referenceable_attrs(attrs, obj) - attrs.incident_severity = obj.incident_severity + attrs.asset_severity = obj.asset_severity attrs.name = obj.name attrs.display_name = obj.display_name attrs.description = obj.description @@ -1907,6 +1855,7 @@ def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: attrs.announcement_type = obj.announcement_type attrs.announcement_updated_at = obj.announcement_updated_at attrs.announcement_updated_by = obj.announcement_updated_by + attrs.asset_announcement_expired_at = obj.asset_announcement_expired_at attrs.owner_users = obj.owner_users attrs.owner_groups = obj.owner_groups attrs.admin_users = obj.admin_users @@ -2130,7 +2079,7 @@ def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: def _extract_incident_attrs(attrs: IncidentAttributes) -> dict: """Extract all Incident attributes from the attrs struct into a flat dict.""" result = _extract_referenceable_attrs(attrs) - result["incident_severity"] = attrs.incident_severity + result["asset_severity"] = attrs.asset_severity result["name"] = attrs.name result["display_name"] = attrs.display_name result["description"] = attrs.description @@ -2153,6 +2102,7 @@ def _extract_incident_attrs(attrs: IncidentAttributes) -> dict: result["announcement_type"] = attrs.announcement_type result["announcement_updated_at"] = attrs.announcement_updated_at result["announcement_updated_by"] = attrs.announcement_updated_by + result["asset_announcement_expired_at"] = attrs.asset_announcement_expired_at result["owner_users"] = attrs.owner_users result["owner_groups"] = 
attrs.owner_groups result["admin_users"] = attrs.admin_users @@ -2441,9 +2391,6 @@ def _incident_to_nested(incident: Incident) -> IncidentNested: is_incomplete=incident.is_incomplete, provenance_type=incident.provenance_type, home_id=incident.home_id, - depth=incident.depth, - immediate_upstream=incident.immediate_upstream, - immediate_downstream=incident.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2475,6 +2422,7 @@ def _incident_from_nested(nested: IncidentNested) -> Incident: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2483,9 +2431,6 @@ def _incident_from_nested(nested: IncidentNested) -> Incident: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_incident_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2516,7 +2461,7 @@ def _incident_from_nested_bytes(data: bytes, serde: Serde) -> Incident: TextField, ) -Incident.INCIDENT_SEVERITY = KeywordField("incidentSeverity", "incidentSeverity") +Incident.ASSET_SEVERITY = KeywordField("assetSeverity", "assetSeverity") Incident.NAME = KeywordField("name", "name") Incident.DISPLAY_NAME = KeywordField("displayName", "displayName") Incident.DESCRIPTION = KeywordField("description", "description") @@ -2557,6 +2502,9 @@ def _incident_from_nested_bytes(data: bytes, serde: Serde) -> Incident: Incident.ANNOUNCEMENT_UPDATED_BY = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) +Incident.ASSET_ANNOUNCEMENT_EXPIRED_AT = NumericField( + "assetAnnouncementExpiredAt", "assetAnnouncementExpiredAt" +) Incident.OWNER_USERS = 
KeywordField("ownerUsers", "ownerUsers") Incident.OWNER_GROUPS = KeywordField("ownerGroups", "ownerGroups") Incident.ADMIN_USERS = KeywordField("adminUsers", "adminUsers") diff --git a/pyatlan_v9/model/assets/infrastructure.py b/pyatlan_v9/model/assets/infrastructure.py index 1ed50ac75..c50308ac6 100644 --- a/pyatlan_v9/model/assets/infrastructure.py +++ b/pyatlan_v9/model/assets/infrastructure.py @@ -27,7 +27,6 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedInfrastructure from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -74,6 +73,7 @@ class Infrastructure(Referenceable): ANNOUNCEMENT_TYPE: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_AT: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_BY: ClassVar[Any] = None + ASSET_ANNOUNCEMENT_EXPIRED_AT: ClassVar[Any] = None OWNER_USERS: ClassVar[Any] = None OWNER_GROUPS: ClassVar[Any] = None ADMIN_USERS: ClassVar[Any] = None @@ -263,6 +263,8 @@ class Infrastructure(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Infrastructure" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -317,6 +319,9 @@ class Infrastructure(Referenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -990,66 +995,6 @@ class Infrastructure(Referenceable): def __post_init__(self) -> None: self.type_name = "Infrastructure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Infrastructure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Infrastructure validation failed: {errors}") - - def minimize(self) -> "Infrastructure": - """ - Return a minimal copy of this Infrastructure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Infrastructure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Infrastructure instance with only the minimum required fields. 
- """ - self.validate() - return Infrastructure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedInfrastructure": - """ - Create a :class:`RelatedInfrastructure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedInfrastructure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedInfrastructure(guid=self.guid) - return RelatedInfrastructure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1159,6 +1104,9 @@ class InfrastructureAttributes(ReferenceableAttributes): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -1903,6 +1851,7 @@ def _populate_infrastructure_attrs( attrs.announcement_type = obj.announcement_type attrs.announcement_updated_at = obj.announcement_updated_at attrs.announcement_updated_by = obj.announcement_updated_by + attrs.asset_announcement_expired_at = obj.asset_announcement_expired_at attrs.owner_users = obj.owner_users attrs.owner_groups = obj.owner_groups attrs.admin_users = obj.admin_users @@ -2148,6 +2097,7 @@ def _extract_infrastructure_attrs(attrs: InfrastructureAttributes) -> dict: result["announcement_type"] = attrs.announcement_type result["announcement_updated_at"] = attrs.announcement_updated_at result["announcement_updated_by"] = attrs.announcement_updated_by + result["asset_announcement_expired_at"] = attrs.asset_announcement_expired_at result["owner_users"] = attrs.owner_users result["owner_groups"] = attrs.owner_groups result["admin_users"] = attrs.admin_users @@ -2436,9 +2386,6 @@ def _infrastructure_to_nested(infrastructure: Infrastructure) -> InfrastructureN is_incomplete=infrastructure.is_incomplete, provenance_type=infrastructure.provenance_type, home_id=infrastructure.home_id, - depth=infrastructure.depth, - immediate_upstream=infrastructure.immediate_upstream, - immediate_downstream=infrastructure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2472,6 +2419,7 @@ def _infrastructure_from_nested(nested: InfrastructureNested) -> Infrastructure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2480,9 +2428,6 @@ def _infrastructure_from_nested(nested: InfrastructureNested) -> 
Infrastructure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_infrastructure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2557,6 +2502,9 @@ def _infrastructure_from_nested_bytes(data: bytes, serde: Serde) -> Infrastructu Infrastructure.ANNOUNCEMENT_UPDATED_BY = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) +Infrastructure.ASSET_ANNOUNCEMENT_EXPIRED_AT = NumericField( + "assetAnnouncementExpiredAt", "assetAnnouncementExpiredAt" +) Infrastructure.OWNER_USERS = KeywordField("ownerUsers", "ownerUsers") Infrastructure.OWNER_GROUPS = KeywordField("ownerGroups", "ownerGroups") Infrastructure.ADMIN_USERS = KeywordField("adminUsers", "adminUsers") diff --git a/pyatlan_v9/model/assets/insight.py b/pyatlan_v9/model/assets/insight.py index 9fffcfc60..d4192e538 100644 --- a/pyatlan_v9/model/assets/insight.py +++ b/pyatlan_v9/model/assets/insight.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedInsight from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class Insight(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Insight" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Insight(Asset): def __post_init__(self) -> None: self.type_name = "Insight" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - 
""" - Dry-run validation of this Insight instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Insight validation failed: {errors}") - - def minimize(self) -> "Insight": - """ - Return a minimal copy of this Insight with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Insight with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Insight instance with only the minimum required fields. - """ - self.validate() - return Insight(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedInsight": - """ - Create a :class:`RelatedInsight` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedInsight reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedInsight(guid=self.guid) - return RelatedInsight(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _insight_to_nested(insight: Insight) -> InsightNested: is_incomplete=insight.is_incomplete, provenance_type=insight.provenance_type, home_id=insight.home_id, - depth=insight.depth, - immediate_upstream=insight.immediate_upstream, - immediate_downstream=insight.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _insight_from_nested(nested: InsightNested) -> Insight: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _insight_from_nested(nested: InsightNested) -> Insight: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_insight_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/kafka.py b/pyatlan_v9/model/assets/kafka.py index 8158463e2..3351e2aad 100644 --- a/pyatlan_v9/model/assets/kafka.py +++ b/pyatlan_v9/model/assets/kafka.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafka from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -91,6 +90,8 @@ class Kafka(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Kafka" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Kafka(Asset): def __post_init__(self) -> None: self.type_name = "Kafka" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Kafka instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Kafka validation failed: {errors}") - - def minimize(self) -> "Kafka": - """ - Return a minimal copy of this Kafka with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Kafka with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Kafka instance with only the minimum required fields. - """ - self.validate() - return Kafka(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafka": - """ - Create a :class:`RelatedKafka` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafka reference to this asset. - """ - if self.guid is not UNSET: - return RelatedKafka(guid=self.guid) - return RelatedKafka(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _kafka_to_nested(kafka: Kafka) -> KafkaNested: is_incomplete=kafka.is_incomplete, provenance_type=kafka.provenance_type, home_id=kafka.home_id, - depth=kafka.depth, - immediate_upstream=kafka.immediate_upstream, - immediate_downstream=kafka.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _kafka_from_nested(nested: KafkaNested) -> Kafka: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _kafka_from_nested(nested: KafkaNested) -> Kafka: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, 
- immediate_downstream=nested.immediate_downstream, **_extract_kafka_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/kafka_consumer_group.py b/pyatlan_v9/model/assets/kafka_consumer_group.py index c7ed07234..88055c821 100644 --- a/pyatlan_v9/model/assets/kafka_consumer_group.py +++ b/pyatlan_v9/model/assets/kafka_consumer_group.py @@ -42,7 +42,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafkaConsumerGroup, RelatedKafkaTopic +from .kafka_related import RelatedKafkaTopic from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -98,6 +98,8 @@ class KafkaConsumerGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "KafkaConsumerGroup" + kafka_consumer_group_topic_consumption_properties: Union[ List[Dict[str, Any]], None, UnsetType ] = UNSET @@ -218,76 +220,6 @@ def __post_init__(self) -> None: r"^.+/consumer-group/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaConsumerGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.kafka_topics is UNSET: - errors.append("kafka_topics is required for creation") - if self.kafka_topic_names is UNSET: - errors.append("kafka_topic_names is required for creation") - if self.kafka_topic_qualified_names is UNSET: - errors.append("kafka_topic_qualified_names is required for creation") - if errors: - raise ValueError(f"KafkaConsumerGroup validation failed: {errors}") - - def minimize(self) -> "KafkaConsumerGroup": - """ - Return a minimal copy of this KafkaConsumerGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaConsumerGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaConsumerGroup instance with only the minimum required fields. - """ - self.validate() - return KafkaConsumerGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaConsumerGroup": - """ - Create a :class:`RelatedKafkaConsumerGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaConsumerGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaConsumerGroup(guid=self.guid) - return RelatedKafkaConsumerGroup(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -612,9 +544,6 @@ def _kafka_consumer_group_to_nested( is_incomplete=kafka_consumer_group.is_incomplete, provenance_type=kafka_consumer_group.provenance_type, home_id=kafka_consumer_group.home_id, - depth=kafka_consumer_group.depth, - immediate_upstream=kafka_consumer_group.immediate_upstream, - immediate_downstream=kafka_consumer_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -650,6 +579,7 @@ def _kafka_consumer_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -658,9 +588,6 @@ def _kafka_consumer_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_kafka_consumer_group_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/kafka_related.py b/pyatlan_v9/model/assets/kafka_related.py index 41aeb55a0..41bbc0436 100644 --- a/pyatlan_v9/model/assets/kafka_related.py +++ b/pyatlan_v9/model/assets/kafka_related.py @@ -126,7 +126,7 @@ class RelatedAzureEventHub(RelatedKafka): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AzureEventHub" so it serializes correctly - azure_event_hub_status: Union[str, None, UnsetType] = UNSET + kafka_status: Union[str, None, UnsetType] = UNSET """""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/kafka_topic.py 
b/pyatlan_v9/model/assets/kafka_topic.py index b84b019f1..53f2cd625 100644 --- a/pyatlan_v9/model/assets/kafka_topic.py +++ b/pyatlan_v9/model/assets/kafka_topic.py @@ -42,7 +42,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafkaConsumerGroup, RelatedKafkaTopic +from .kafka_related import RelatedKafkaConsumerGroup from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -104,6 +104,8 @@ class KafkaTopic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "KafkaTopic" + kafka_topic_is_internal: Union[bool, None, UnsetType] = UNSET """Whether this topic is an internal topic (true) or not (false).""" @@ -240,67 +242,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/topic/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaTopic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if errors: - raise ValueError(f"KafkaTopic validation failed: {errors}") - - def minimize(self) -> "KafkaTopic": - """ - Return a minimal copy of this KafkaTopic with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaTopic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaTopic instance with only the minimum required fields. - """ - self.validate() - return KafkaTopic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaTopic": - """ - Create a :class:`RelatedKafkaTopic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaTopic reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaTopic(guid=self.guid) - return RelatedKafkaTopic(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -633,9 +574,6 @@ def _kafka_topic_to_nested(kafka_topic: KafkaTopic) -> KafkaTopicNested: is_incomplete=kafka_topic.is_incomplete, provenance_type=kafka_topic.provenance_type, home_id=kafka_topic.home_id, - depth=kafka_topic.depth, - immediate_upstream=kafka_topic.immediate_upstream, - immediate_downstream=kafka_topic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -667,6 +605,7 @@ def _kafka_topic_from_nested(nested: KafkaTopicNested) -> KafkaTopic: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -675,9 +614,6 @@ def _kafka_topic_from_nested(nested: KafkaTopicNested) -> KafkaTopic: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_kafka_topic_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/link.py b/pyatlan_v9/model/assets/link.py index 03477edb7..da0c76491 100644 --- a/pyatlan_v9/model/assets/link.py +++ b/pyatlan_v9/model/assets/link.py @@ -98,6 +98,8 @@ class Link(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Link" + icon: Union[str, None, UnsetType] = UNSET """Icon for the link.""" @@ -214,66 +216,6 @@ class Link(Asset): def __post_init__(self) -> None: self.type_name = "Link" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Link instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Link validation failed: {errors}") - - def minimize(self) -> "Link": - """ - Return a minimal copy of this Link with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Link with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Link instance with only the minimum required fields. - """ - self.validate() - return Link(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLink": - """ - Create a :class:`RelatedLink` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLink reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLink(guid=self.guid) - return RelatedLink(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -550,9 +492,6 @@ def _link_to_nested(link: Link) -> LinkNested: is_incomplete=link.is_incomplete, provenance_type=link.provenance_type, home_id=link.home_id, - depth=link.depth, - immediate_upstream=link.immediate_upstream, - immediate_downstream=link.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -582,6 +521,7 @@ def _link_from_nested(nested: LinkNested) -> Link: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -590,9 +530,6 @@ def _link_from_nested(nested: LinkNested) -> Link: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_link_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker.py b/pyatlan_v9/model/assets/looker.py index ab9c3afef..d38edf1fc 100644 --- a/pyatlan_v9/model/assets/looker.py +++ b/pyatlan_v9/model/assets/looker.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import RelatedLooker from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from 
.partial_related import RelatedPartialField, RelatedPartialObject @@ -92,6 +91,8 @@ class Looker(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Looker" + looker_slug: Union[str, None, UnsetType] = UNSET """An alpha-numeric slug for the underlying Looker asset that can be used to uniquely identify it""" @@ -190,66 +191,6 @@ class Looker(Asset): def __post_init__(self) -> None: self.type_name = "Looker" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Looker instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Looker validation failed: {errors}") - - def minimize(self) -> "Looker": - """ - Return a minimal copy of this Looker with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Looker with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Looker instance with only the minimum required fields. - """ - self.validate() - return Looker(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLooker": - """ - Create a :class:`RelatedLooker` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLooker reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLooker(guid=self.guid) - return RelatedLooker(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -501,9 +442,6 @@ def _looker_to_nested(looker: Looker) -> LookerNested: is_incomplete=looker.is_incomplete, provenance_type=looker.provenance_type, home_id=looker.home_id, - depth=looker.depth, - immediate_upstream=looker.immediate_upstream, - immediate_downstream=looker.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -533,6 +471,7 @@ def _looker_from_nested(nested: LookerNested) -> Looker: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -541,9 +480,6 @@ def _looker_from_nested(nested: LookerNested) -> Looker: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/looker_dashboard.py b/pyatlan_v9/model/assets/looker_dashboard.py index b5224c5fd..654241ac2 100644 --- a/pyatlan_v9/model/assets/looker_dashboard.py +++ b/pyatlan_v9/model/assets/looker_dashboard.py @@ -42,7 +42,6 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( - RelatedLookerDashboard, RelatedLookerField, RelatedLookerFolder, RelatedLookerLook, @@ -110,6 +109,8 @@ class LookerDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerDashboard" + folder_name: Union[str, None, UnsetType] = UNSET """Name of the parent folder in Looker that contains this dashboard.""" @@ -247,74 +248,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.folder is UNSET: - errors.append("folder is required for creation") - if self.folder_name is UNSET: - errors.append("folder_name is required for creation") - if errors: - raise ValueError(f"LookerDashboard validation failed: {errors}") - - def minimize(self) -> "LookerDashboard": - """ - Return a minimal copy of this LookerDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerDashboard instance with only the minimum required fields. - """ - self.validate() - return LookerDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerDashboard": - """ - Create a :class:`RelatedLookerDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerDashboard(guid=self.guid) - return RelatedLookerDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -627,9 +560,6 @@ def _looker_dashboard_to_nested( is_incomplete=looker_dashboard.is_incomplete, provenance_type=looker_dashboard.provenance_type, home_id=looker_dashboard.home_id, - depth=looker_dashboard.depth, - immediate_upstream=looker_dashboard.immediate_upstream, - immediate_downstream=looker_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -663,6 +593,7 @@ def _looker_dashboard_from_nested(nested: LookerDashboardNested) -> LookerDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -671,9 +602,6 @@ def _looker_dashboard_from_nested(nested: LookerDashboardNested) -> LookerDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_explore.py b/pyatlan_v9/model/assets/looker_explore.py index 9cf525d28..64a3645be 100644 --- a/pyatlan_v9/model/assets/looker_explore.py +++ b/pyatlan_v9/model/assets/looker_explore.py @@ -41,12 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm 
-from .looker_related import ( - RelatedLookerExplore, - RelatedLookerField, - RelatedLookerModel, - RelatedLookerProject, -) +from .looker_related import RelatedLookerField, RelatedLookerModel, RelatedLookerProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -106,6 +101,8 @@ class LookerExplore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerExplore" + project_name: Union[str, None, UnsetType] = UNSET """Name of the parent project of this Explore.""" @@ -236,76 +233,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerExplore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if self.model_name is UNSET: - errors.append("model_name is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerExplore validation failed: {errors}") - - def minimize(self) -> "LookerExplore": - """ - Return a minimal copy of this LookerExplore with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerExplore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerExplore instance with only the minimum required fields. - """ - self.validate() - return LookerExplore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerExplore": - """ - Create a :class:`RelatedLookerExplore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerExplore reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerExplore(guid=self.guid) - return RelatedLookerExplore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -598,9 +525,6 @@ def _looker_explore_to_nested(looker_explore: LookerExplore) -> LookerExploreNes is_incomplete=looker_explore.is_incomplete, provenance_type=looker_explore.provenance_type, home_id=looker_explore.home_id, - depth=looker_explore.depth, - immediate_upstream=looker_explore.immediate_upstream, - immediate_downstream=looker_explore.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -634,6 +558,7 @@ def _looker_explore_from_nested(nested: LookerExploreNested) -> LookerExplore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -642,9 +567,6 @@ def _looker_explore_from_nested(nested: LookerExploreNested) -> LookerExplore: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_explore_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_field.py b/pyatlan_v9/model/assets/looker_field.py index 6133bcf0c..dde662950 100644 --- a/pyatlan_v9/model/assets/looker_field.py +++ b/pyatlan_v9/model/assets/looker_field.py @@ -44,7 +44,6 @@ from .looker_related import ( RelatedLookerDashboard, RelatedLookerExplore, - RelatedLookerField, RelatedLookerLook, RelatedLookerModel, 
RelatedLookerProject, @@ -122,6 +121,8 @@ class LookerField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerField" + project_name: Union[str, None, UnsetType] = UNSET """Name of the project in which this field exists.""" @@ -288,76 +289,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if self.model_name is UNSET: - errors.append("model_name is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerField validation failed: {errors}") - - def minimize(self) -> "LookerField": - """ - Return a minimal copy of this LookerField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerField instance with only the minimum required fields. - """ - self.validate() - return LookerField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerField": - """ - Create a :class:`RelatedLookerField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerField(guid=self.guid) - return RelatedLookerField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -708,9 +639,6 @@ def _looker_field_to_nested(looker_field: LookerField) -> LookerFieldNested: is_incomplete=looker_field.is_incomplete, provenance_type=looker_field.provenance_type, home_id=looker_field.home_id, - depth=looker_field.depth, - immediate_upstream=looker_field.immediate_upstream, - immediate_downstream=looker_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -742,6 +670,7 @@ def _looker_field_from_nested(nested: LookerFieldNested) -> LookerField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -750,9 +679,6 @@ def _looker_field_from_nested(nested: LookerFieldNested) -> LookerField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_folder.py b/pyatlan_v9/model/assets/looker_folder.py index 0209fdd02..0d5d202f8 100644 --- a/pyatlan_v9/model/assets/looker_folder.py +++ b/pyatlan_v9/model/assets/looker_folder.py @@ -105,6 +105,8 @@ class LookerFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerFolder" + 
source_content_metadata_id: Union[int, None, UnsetType] = UNSET """Identifier for the folder's content metadata in Looker.""" @@ -229,66 +231,6 @@ class LookerFolder(Asset): def __post_init__(self) -> None: self.type_name = "LookerFolder" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"LookerFolder validation failed: {errors}") - - def minimize(self) -> "LookerFolder": - """ - Return a minimal copy of this LookerFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerFolder instance with only the minimum required fields. 
- """ - self.validate() - return LookerFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerFolder": - """ - Create a :class:`RelatedLookerFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLookerFolder(guid=self.guid) - return RelatedLookerFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -582,9 +524,6 @@ def _looker_folder_to_nested(looker_folder: LookerFolder) -> LookerFolderNested: is_incomplete=looker_folder.is_incomplete, provenance_type=looker_folder.provenance_type, home_id=looker_folder.home_id, - depth=looker_folder.depth, - immediate_upstream=looker_folder.immediate_upstream, - immediate_downstream=looker_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +557,7 @@ def _looker_folder_from_nested(nested: LookerFolderNested) -> LookerFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +566,6 @@ def _looker_folder_from_nested(nested: LookerFolderNested) -> LookerFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_folder_attrs(attrs), # Merged relationship 
attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_look.py b/pyatlan_v9/model/assets/looker_look.py index 40984227c..4d17da335 100644 --- a/pyatlan_v9/model/assets/looker_look.py +++ b/pyatlan_v9/model/assets/looker_look.py @@ -45,7 +45,6 @@ RelatedLookerDashboard, RelatedLookerField, RelatedLookerFolder, - RelatedLookerLook, RelatedLookerModel, RelatedLookerQuery, RelatedLookerTile, @@ -117,6 +116,8 @@ class LookerLook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerLook" + folder_name: Union[str, None, UnsetType] = UNSET """Name of the folder in which the Look is organized.""" @@ -269,74 +270,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerLook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.folder is UNSET: - errors.append("folder is required for creation") - if self.folder_name is UNSET: - errors.append("folder_name is required for creation") - if errors: - raise ValueError(f"LookerLook validation failed: {errors}") - - def minimize(self) -> "LookerLook": - """ - Return a minimal copy of this LookerLook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerLook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerLook instance with only the minimum required fields. - """ - self.validate() - return LookerLook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerLook": - """ - Create a :class:`RelatedLookerLook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerLook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerLook(guid=self.guid) - return RelatedLookerLook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -662,9 +595,6 @@ def _looker_look_to_nested(looker_look: LookerLook) -> LookerLookNested: is_incomplete=looker_look.is_incomplete, provenance_type=looker_look.provenance_type, home_id=looker_look.home_id, - depth=looker_look.depth, - immediate_upstream=looker_look.immediate_upstream, - immediate_downstream=looker_look.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -696,6 +626,7 @@ def _looker_look_from_nested(nested: LookerLookNested) -> LookerLook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -704,9 +635,6 @@ def _looker_look_from_nested(nested: LookerLookNested) -> LookerLook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_look_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_model.py b/pyatlan_v9/model/assets/looker_model.py index ce6406fd9..9137cee5f 100644 --- a/pyatlan_v9/model/assets/looker_model.py +++ b/pyatlan_v9/model/assets/looker_model.py @@ -45,7 +45,6 @@ RelatedLookerExplore, RelatedLookerField, RelatedLookerLook, - RelatedLookerModel, RelatedLookerProject, RelatedLookerQuery, ) @@ -106,6 +105,8 @@ class LookerModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = 
None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerModel" + project_name: Union[str, None, UnsetType] = UNSET """Name of the project in which the model exists.""" @@ -228,74 +229,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerModel validation failed: {errors}") - - def minimize(self) -> "LookerModel": - """ - Return a minimal copy of this LookerModel with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerModel instance with only the minimum required fields. - """ - self.validate() - return LookerModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerModel": - """ - Create a :class:`RelatedLookerModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLookerModel(guid=self.guid) - return RelatedLookerModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -574,9 +507,6 @@ def _looker_model_to_nested(looker_model: LookerModel) -> LookerModelNested: is_incomplete=looker_model.is_incomplete, provenance_type=looker_model.provenance_type, home_id=looker_model.home_id, - depth=looker_model.depth, - immediate_upstream=looker_model.immediate_upstream, - immediate_downstream=looker_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +538,7 @@ def _looker_model_from_nested(nested: LookerModelNested) -> LookerModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +547,6 @@ def _looker_model_from_nested(nested: LookerModelNested) -> LookerModel: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_project.py b/pyatlan_v9/model/assets/looker_project.py index bf1af586e..8f7d32749 100644 --- a/pyatlan_v9/model/assets/looker_project.py +++ b/pyatlan_v9/model/assets/looker_project.py @@ -104,6 +104,8 @@ class LookerProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerProject" + looker_slug: Union[str, None, UnsetType] = UNSET """An alpha-numeric slug for the underlying Looker asset that can be used to uniquely identify it""" @@ -220,66 +222,6 @@ class LookerProject(Asset): def __post_init__(self) -> None: self.type_name = "LookerProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"LookerProject validation failed: {errors}") - - def minimize(self) -> "LookerProject": - """ - Return a minimal copy of this LookerProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerProject instance with only the minimum required fields. - """ - self.validate() - return LookerProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerProject": - """ - Create a :class:`RelatedLookerProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerProject(guid=self.guid) - return RelatedLookerProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -559,9 +501,6 @@ def _looker_project_to_nested(looker_project: LookerProject) -> LookerProjectNes is_incomplete=looker_project.is_incomplete, provenance_type=looker_project.provenance_type, home_id=looker_project.home_id, - depth=looker_project.depth, - immediate_upstream=looker_project.immediate_upstream, - immediate_downstream=looker_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -595,6 +534,7 @@ def _looker_project_from_nested(nested: LookerProjectNested) -> LookerProject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -603,9 +543,6 @@ def _looker_project_from_nested(nested: LookerProjectNested) -> LookerProject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_query.py b/pyatlan_v9/model/assets/looker_query.py index 1950cd400..322fe2944 100644 --- a/pyatlan_v9/model/assets/looker_query.py +++ b/pyatlan_v9/model/assets/looker_query.py @@ -41,12 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm -from .looker_related import ( - RelatedLookerLook, - RelatedLookerModel, - RelatedLookerQuery, - RelatedLookerTile, -) +from .looker_related import RelatedLookerLook, RelatedLookerModel, RelatedLookerTile from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -105,6 +100,8 @@ class LookerQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerQuery" + source_definition: Union[str, None, UnsetType] = UNSET """Deprecated.""" @@ -232,72 +229,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if errors: - raise ValueError(f"LookerQuery validation failed: {errors}") - - def minimize(self) -> "LookerQuery": - """ - Return a minimal copy of this LookerQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerQuery instance with only the minimum required fields. - """ - self.validate() - return LookerQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerQuery": - """ - Create a :class:`RelatedLookerQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerQuery reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerQuery(guid=self.guid) - return RelatedLookerQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -583,9 +514,6 @@ def _looker_query_to_nested(looker_query: LookerQuery) -> LookerQueryNested: is_incomplete=looker_query.is_incomplete, provenance_type=looker_query.provenance_type, home_id=looker_query.home_id, - depth=looker_query.depth, - immediate_upstream=looker_query.immediate_upstream, - immediate_downstream=looker_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -617,6 +545,7 @@ def _looker_query_from_nested(nested: LookerQueryNested) -> LookerQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -625,9 +554,6 @@ def _looker_query_from_nested(nested: LookerQueryNested) -> LookerQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_query_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_related.py b/pyatlan_v9/model/assets/looker_related.py index 564e486ac..a0fd46b13 100644 --- a/pyatlan_v9/model/assets/looker_related.py +++ b/pyatlan_v9/model/assets/looker_related.py @@ -134,7 +134,7 @@ class RelatedLookerView(RelatedLooker): looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: 
Union[str, None, UnsetType] = UNSET
+    looker_view_file_name: Union[str, None, UnsetType] = UNSET
     """File name of this view."""
 
     def __post_init__(self) -> None:
diff --git a/pyatlan_v9/model/assets/looker_tile.py b/pyatlan_v9/model/assets/looker_tile.py
index 5da4fa6c0..3234f1e19 100644
--- a/pyatlan_v9/model/assets/looker_tile.py
+++ b/pyatlan_v9/model/assets/looker_tile.py
@@ -47,7 +47,6 @@
     RelatedLookerField,
     RelatedLookerLook,
     RelatedLookerQuery,
-    RelatedLookerTile,
 )
 from .model_related import RelatedModelAttribute, RelatedModelEntity
 from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor
@@ -112,6 +111,8 @@ class LookerTile(Asset):
     INPUT_TO_SPARK_JOBS: ClassVar[Any] = None
     OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None
 
+    type_name: Union[str, UnsetType] = "LookerTile"
+
     lookml_link_id: Union[str, None, UnsetType] = UNSET
     """Identifier for the LoomML link."""
 
@@ -258,72 +259,6 @@ def __post_init__(self) -> None:
             r"^.+/[^/]+/[^/]+/[^/]+$"
         )
 
-    def validate(self, for_creation: bool = False) -> None:
-        """
-        Dry-run validation of this LookerTile instance.
-
-        Checks that required fields (type_name, name, qualified_name) are set.
-        When ``for_creation=True``, also checks hierarchy-specific fields
-        (parent references, denormalized attributes) needed to create this asset.
-
-        This is purely opt-in and is NOT called by any serde path — only by
-        explicit user invocation (e.g., validating JSONL before sending to Atlan).
-
-        Args:
-            for_creation: If True, also validate fields required for asset creation.
-
-        Raises:
-            ValueError: If any required fields are missing or invalid.
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dashboard is UNSET: - errors.append("dashboard is required for creation") - if errors: - raise ValueError(f"LookerTile validation failed: {errors}") - - def minimize(self) -> "LookerTile": - """ - Return a minimal copy of this LookerTile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerTile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerTile instance with only the minimum required fields. - """ - self.validate() - return LookerTile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerTile": - """ - Create a :class:`RelatedLookerTile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerTile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerTile(guid=self.guid) - return RelatedLookerTile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -635,9 +570,6 @@ def _looker_tile_to_nested(looker_tile: LookerTile) -> LookerTileNested: is_incomplete=looker_tile.is_incomplete, provenance_type=looker_tile.provenance_type, home_id=looker_tile.home_id, - depth=looker_tile.depth, - immediate_upstream=looker_tile.immediate_upstream, - immediate_downstream=looker_tile.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -669,6 +601,7 @@ def _looker_tile_from_nested(nested: LookerTileNested) -> LookerTile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -677,9 +610,6 @@ def _looker_tile_from_nested(nested: LookerTileNested) -> LookerTile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_tile_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/looker_view.py b/pyatlan_v9/model/assets/looker_view.py index dab411ac2..6d4e89bd2 100644 --- a/pyatlan_v9/model/assets/looker_view.py +++ b/pyatlan_v9/model/assets/looker_view.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import 
RelatedLookerField, RelatedLookerProject, RelatedLookerView +from .looker_related import RelatedLookerField, RelatedLookerProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -65,7 +65,7 @@ class LookerView(Asset): PROJECT_NAME: ClassVar[Any] = None LOOKER_VIEW_FILE_PATH: ClassVar[Any] = None - LOOKER_VIEW_FILE_NAME: ClassVar[Any] = None + LOOKER_FILE_NAME: ClassVar[Any] = None LOOKER_SLUG: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -98,13 +98,15 @@ class LookerView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerView" + project_name: Union[str, None, UnsetType] = UNSET """Name of the project in which this view exists.""" looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: Union[str, None, UnsetType] = UNSET + looker_file_name: Union[str, None, UnsetType] = UNSET """File name of this view.""" looker_slug: Union[str, None, UnsetType] = UNSET @@ -217,74 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerView validation failed: {errors}") - - def minimize(self) -> "LookerView": - """ - Return a minimal copy of this LookerView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerView instance with only the minimum required fields. - """ - self.validate() - return LookerView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerView": - """ - Create a :class:`RelatedLookerView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerView(guid=self.guid) - return RelatedLookerView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -346,7 +280,7 @@ class LookerViewAttributes(AssetAttributes): looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: Union[str, None, UnsetType] = UNSET + looker_file_name: Union[str, None, UnsetType] = UNSET """File name of this view.""" looker_slug: Union[str, None, UnsetType] = UNSET @@ -512,7 +446,7 @@ def _populate_looker_view_attrs(attrs: LookerViewAttributes, obj: LookerView) -> _populate_asset_attrs(attrs, obj) attrs.project_name = obj.project_name attrs.looker_view_file_path = obj.looker_view_file_path - attrs.looker_view_file_name = obj.looker_view_file_name + attrs.looker_file_name = obj.looker_file_name attrs.looker_slug = obj.looker_slug @@ -521,7 +455,7 @@ def _extract_looker_view_attrs(attrs: LookerViewAttributes) -> dict: result = _extract_asset_attrs(attrs) result["project_name"] = attrs.project_name result["looker_view_file_path"] = attrs.looker_view_file_path - result["looker_view_file_name"] = attrs.looker_view_file_name + result["looker_file_name"] = attrs.looker_file_name result["looker_slug"] = attrs.looker_slug return result @@ -559,9 +493,6 @@ def _looker_view_to_nested(looker_view: LookerView) -> LookerViewNested: is_incomplete=looker_view.is_incomplete, provenance_type=looker_view.provenance_type, home_id=looker_view.home_id, - depth=looker_view.depth, - immediate_upstream=looker_view.immediate_upstream, - immediate_downstream=looker_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -593,6 +524,7 @@ def 
_looker_view_from_nested(nested: LookerViewNested) -> LookerView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -601,9 +533,6 @@ def _looker_view_from_nested(nested: LookerViewNested) -> LookerView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -630,9 +559,7 @@ def _looker_view_from_nested_bytes(data: bytes, serde: Serde) -> LookerView: LookerView.LOOKER_VIEW_FILE_PATH = KeywordField( "lookerViewFilePath", "lookerViewFilePath" ) -LookerView.LOOKER_VIEW_FILE_NAME = KeywordField( - "lookerViewFileName", "lookerViewFileName" -) +LookerView.LOOKER_FILE_NAME = KeywordField("lookerFileName", "lookerFileName") LookerView.LOOKER_SLUG = KeywordField("lookerSlug", "lookerSlug") LookerView.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") LookerView.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") diff --git a/pyatlan_v9/model/assets/materialised_view.py b/pyatlan_v9/model/assets/materialised_view.py index 2eef815d5..265214767 100644 --- a/pyatlan_v9/model/assets/materialised_view.py +++ b/pyatlan_v9/model/assets/materialised_view.py @@ -59,7 +59,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedColumn, RelatedMaterialisedView, RelatedSchema +from .sql_related import RelatedColumn, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -140,6 +140,8 @@ class 
MaterialisedView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MaterialisedView" + refresh_mode: Union[str, None, UnsetType] = UNSET """Refresh mode for this materialized view.""" @@ -366,80 +368,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MaterialisedView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if 
self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"MaterialisedView validation failed: {errors}") - - def minimize(self) -> "MaterialisedView": - """ - Return a minimal copy of this MaterialisedView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MaterialisedView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MaterialisedView instance with only the minimum required fields. - """ - self.validate() - return MaterialisedView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMaterialisedView": - """ - Create a :class:`RelatedMaterialisedView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMaterialisedView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMaterialisedView(guid=self.guid) - return RelatedMaterialisedView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -988,9 +916,6 @@ def _materialised_view_to_nested( is_incomplete=materialised_view.is_incomplete, provenance_type=materialised_view.provenance_type, home_id=materialised_view.home_id, - depth=materialised_view.depth, - immediate_upstream=materialised_view.immediate_upstream, - immediate_downstream=materialised_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1024,6 +949,7 @@ def _materialised_view_from_nested(nested: MaterialisedViewNested) -> Materialis updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1032,9 +958,6 @@ def _materialised_view_from_nested(nested: MaterialisedViewNested) -> Materialis is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_materialised_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/matillion.py b/pyatlan_v9/model/assets/matillion.py index 01d29803d..2d90ec01c 100644 --- a/pyatlan_v9/model/assets/matillion.py +++ b/pyatlan_v9/model/assets/matillion.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor 
from .partial_related import RelatedPartialField, RelatedPartialObject @@ -92,6 +91,8 @@ class Matillion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Matillion" + matillion_version: Union[str, None, UnsetType] = UNSET """Current point in time state of a project.""" @@ -190,66 +191,6 @@ class Matillion(Asset): def __post_init__(self) -> None: self.type_name = "Matillion" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Matillion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Matillion validation failed: {errors}") - - def minimize(self) -> "Matillion": - """ - Return a minimal copy of this Matillion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Matillion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Matillion instance with only the minimum required fields. - """ - self.validate() - return Matillion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillion": - """ - Create a :class:`RelatedMatillion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillion reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMatillion(guid=self.guid) - return RelatedMatillion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -501,9 +442,6 @@ def _matillion_to_nested(matillion: Matillion) -> MatillionNested: is_incomplete=matillion.is_incomplete, provenance_type=matillion.provenance_type, home_id=matillion.home_id, - depth=matillion.depth, - immediate_upstream=matillion.immediate_upstream, - immediate_downstream=matillion.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -535,6 +473,7 @@ def _matillion_from_nested(nested: MatillionNested) -> Matillion: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -543,9 +482,6 @@ def _matillion_from_nested(nested: MatillionNested) -> Matillion: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_attrs(attrs), # Merged relationship 
attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/matillion_component.py b/pyatlan_v9/model/assets/matillion_component.py index 0ea4d5576..2867b8dbb 100644 --- a/pyatlan_v9/model/assets/matillion_component.py +++ b/pyatlan_v9/model/assets/matillion_component.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillionComponent, RelatedMatillionJob +from .matillion_related import RelatedMatillionJob from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -103,6 +103,8 @@ class MatillionComponent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionComponent" + matillion_component_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the component in Matillion.""" @@ -239,76 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionComponent instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_job is UNSET: - errors.append("matillion_job is required for creation") - if self.matillion_job_name is UNSET: - errors.append("matillion_job_name is required for creation") - if self.matillion_job_qualified_name is UNSET: - errors.append("matillion_job_qualified_name is required for creation") - if errors: - raise ValueError(f"MatillionComponent validation failed: {errors}") - - def minimize(self) -> "MatillionComponent": - """ - Return a minimal copy of this MatillionComponent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionComponent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionComponent instance with only the minimum required fields. - """ - self.validate() - return MatillionComponent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionComponent": - """ - Create a :class:`RelatedMatillionComponent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionComponent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionComponent(guid=self.guid) - return RelatedMatillionComponent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -628,9 +560,6 @@ def _matillion_component_to_nested( is_incomplete=matillion_component.is_incomplete, provenance_type=matillion_component.provenance_type, home_id=matillion_component.home_id, - depth=matillion_component.depth, - immediate_upstream=matillion_component.immediate_upstream, - immediate_downstream=matillion_component.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -666,6 +595,7 @@ def _matillion_component_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -674,9 +604,6 @@ def _matillion_component_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_component_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/matillion_group.py b/pyatlan_v9/model/assets/matillion_group.py index 68466704d..2f751f675 100644 --- a/pyatlan_v9/model/assets/matillion_group.py +++ b/pyatlan_v9/model/assets/matillion_group.py @@ -40,7 +40,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillionGroup, 
RelatedMatillionProject +from .matillion_related import RelatedMatillionProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -94,6 +94,8 @@ class MatillionGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionGroup" + matillion_project_count: Union[int, None, UnsetType] = UNSET """Number of projects within the group.""" @@ -198,66 +200,6 @@ class MatillionGroup(Asset): def __post_init__(self) -> None: self.type_name = "MatillionGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MatillionGroup validation failed: {errors}") - - def minimize(self) -> "MatillionGroup": - """ - Return a minimal copy of this MatillionGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionGroup instance with only the minimum required fields. - """ - self.validate() - return MatillionGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionGroup": - """ - Create a :class:`RelatedMatillionGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionGroup(guid=self.guid) - return RelatedMatillionGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -524,9 +466,6 @@ def _matillion_group_to_nested(matillion_group: MatillionGroup) -> MatillionGrou is_incomplete=matillion_group.is_incomplete, provenance_type=matillion_group.provenance_type, home_id=matillion_group.home_id, - depth=matillion_group.depth, - immediate_upstream=matillion_group.immediate_upstream, - immediate_downstream=matillion_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -560,6 +499,7 @@ def _matillion_group_from_nested(nested: MatillionGroupNested) -> MatillionGroup updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -568,9 +508,6 @@ def _matillion_group_from_nested(nested: MatillionGroupNested) -> MatillionGroup is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_group_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/matillion_job.py b/pyatlan_v9/model/assets/matillion_job.py index 0ce9a2779..5e96358dd 100644 --- a/pyatlan_v9/model/assets/matillion_job.py +++ b/pyatlan_v9/model/assets/matillion_job.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from 
.gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import ( - RelatedMatillionComponent, - RelatedMatillionJob, - RelatedMatillionProject, -) +from .matillion_related import RelatedMatillionComponent, RelatedMatillionProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -105,6 +101,8 @@ class MatillionJob(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionJob" + matillion_job_type: Union[str, None, UnsetType] = UNSET """Type of the job, for example: orchestration or transformation.""" @@ -237,78 +235,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionJob instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_project is UNSET: - errors.append("matillion_project is required for creation") - if self.matillion_project_name is UNSET: - errors.append("matillion_project_name is required for creation") - if self.matillion_project_qualified_name is UNSET: - errors.append( - "matillion_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MatillionJob validation failed: {errors}") - - def minimize(self) -> "MatillionJob": - """ - Return a minimal copy of this MatillionJob with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionJob with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionJob instance with only the minimum required fields. - """ - self.validate() - return MatillionJob(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionJob": - """ - Create a :class:`RelatedMatillionJob` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionJob reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionJob(guid=self.guid) - return RelatedMatillionJob(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -604,9 +530,6 @@ def _matillion_job_to_nested(matillion_job: MatillionJob) -> MatillionJobNested: is_incomplete=matillion_job.is_incomplete, provenance_type=matillion_job.provenance_type, home_id=matillion_job.home_id, - depth=matillion_job.depth, - immediate_upstream=matillion_job.immediate_upstream, - immediate_downstream=matillion_job.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -640,6 +563,7 @@ def _matillion_job_from_nested(nested: MatillionJobNested) -> MatillionJob: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -648,9 +572,6 @@ def _matillion_job_from_nested(nested: MatillionJobNested) -> MatillionJob: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_job_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/matillion_project.py b/pyatlan_v9/model/assets/matillion_project.py index ea8de88af..50af4af39 100644 --- a/pyatlan_v9/model/assets/matillion_project.py +++ b/pyatlan_v9/model/assets/matillion_project.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm -from .matillion_related import ( - RelatedMatillionGroup, - RelatedMatillionJob, - RelatedMatillionProject, -) +from .matillion_related import RelatedMatillionGroup, RelatedMatillionJob from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -104,6 +100,8 @@ class MatillionProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionProject" + matillion_versions: Union[List[str], None, UnsetType] = UNSET """List of versions in the project.""" @@ -229,76 +227,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_group is UNSET: - errors.append("matillion_group is required for creation") - if self.matillion_group_name is UNSET: - errors.append("matillion_group_name is required for creation") - if self.matillion_group_qualified_name is UNSET: - errors.append("matillion_group_qualified_name is required for creation") - if errors: - raise ValueError(f"MatillionProject validation failed: {errors}") - - def minimize(self) -> "MatillionProject": - """ - Return a minimal copy of this MatillionProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionProject instance with only the minimum required fields. - """ - self.validate() - return MatillionProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionProject": - """ - Create a :class:`RelatedMatillionProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionProject(guid=self.guid) - return RelatedMatillionProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -593,9 +521,6 @@ def _matillion_project_to_nested( is_incomplete=matillion_project.is_incomplete, provenance_type=matillion_project.provenance_type, home_id=matillion_project.home_id, - depth=matillion_project.depth, - immediate_upstream=matillion_project.immediate_upstream, - immediate_downstream=matillion_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -629,6 +554,7 @@ def _matillion_project_from_nested(nested: MatillionProjectNested) -> MatillionP updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -637,9 +563,6 @@ def _matillion_project_from_nested(nested: MatillionProjectNested) -> MatillionP is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mc_incident.py b/pyatlan_v9/model/assets/mc_incident.py index 73a776521..c27a8680b 100644 --- a/pyatlan_v9/model/assets/mc_incident.py +++ b/pyatlan_v9/model/assets/mc_incident.py @@ -104,6 +104,8 @@ class MCIncident(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MCIncident" + 
mc_incident_id: Union[str, None, UnsetType] = UNSET """Identifier of this incident, from Monte Carlo.""" @@ -241,72 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MCIncident instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mc_monitor is UNSET: - errors.append("mc_monitor is required for creation") - if errors: - raise ValueError(f"MCIncident validation failed: {errors}") - - def minimize(self) -> "MCIncident": - """ - Return a minimal copy of this MCIncident with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MCIncident with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new MCIncident instance with only the minimum required fields. - """ - self.validate() - return MCIncident(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMCIncident": - """ - Create a :class:`RelatedMCIncident` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMCIncident reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMCIncident(guid=self.guid) - return RelatedMCIncident(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -611,9 +547,6 @@ def _mc_incident_to_nested(mc_incident: MCIncident) -> MCIncidentNested: is_incomplete=mc_incident.is_incomplete, provenance_type=mc_incident.provenance_type, home_id=mc_incident.home_id, - depth=mc_incident.depth, - immediate_upstream=mc_incident.immediate_upstream, - immediate_downstream=mc_incident.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -645,6 +578,7 @@ def _mc_incident_from_nested(nested: MCIncidentNested) -> MCIncident: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -653,9 +587,6 @@ def _mc_incident_from_nested(nested: MCIncidentNested) -> MCIncident: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_mc_incident_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mc_monitor.py b/pyatlan_v9/model/assets/mc_monitor.py index a1b8a1e6a..7d6197e2a 100644 --- a/pyatlan_v9/model/assets/mc_monitor.py +++ b/pyatlan_v9/model/assets/mc_monitor.py @@ -116,6 +116,8 @@ class MCMonitor(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MCMonitor" + mc_monitor_id: Union[str, None, UnsetType] = UNSET """Unique identifier for this monitor, from Monte Carlo.""" @@ -286,66 +288,6 @@ class MCMonitor(Asset): def __post_init__(self) -> None: self.type_name = "MCMonitor" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MCMonitor instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MCMonitor validation failed: {errors}") - - def minimize(self) -> "MCMonitor": - """ - Return a minimal copy of this MCMonitor with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MCMonitor with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MCMonitor instance with only the minimum required fields. - """ - self.validate() - return MCMonitor(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMCMonitor": - """ - Create a :class:`RelatedMCMonitor` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMCMonitor reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMCMonitor(guid=self.guid) - return RelatedMCMonitor(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -726,9 +668,6 @@ def _mc_monitor_to_nested(mc_monitor: MCMonitor) -> MCMonitorNested: is_incomplete=mc_monitor.is_incomplete, provenance_type=mc_monitor.provenance_type, home_id=mc_monitor.home_id, - depth=mc_monitor.depth, - immediate_upstream=mc_monitor.immediate_upstream, - immediate_downstream=mc_monitor.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -760,6 +699,7 @@ def _mc_monitor_from_nested(nested: MCMonitorNested) -> MCMonitor: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -768,9 +708,6 @@ def _mc_monitor_from_nested(nested: MCMonitorNested) -> MCMonitor: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mc_monitor_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/metabase.py b/pyatlan_v9/model/assets/metabase.py index 360f5df0b..676921c89 100644 --- a/pyatlan_v9/model/assets/metabase.py +++ b/pyatlan_v9/model/assets/metabase.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import RelatedMetabase from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -93,6 +92,8 @@ class Metabase(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Metabase" + metabase_collection_name: Union[str, None, UnsetType] = UNSET """Simple name of the Metabase collection in which this asset exists.""" @@ -194,66 +195,6 @@ class Metabase(Asset): def __post_init__(self) -> None: self.type_name = "Metabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Metabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Metabase validation failed: {errors}") - - def minimize(self) -> "Metabase": - """ - Return a minimal copy of this Metabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Metabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Metabase instance with only the minimum required fields. - """ - self.validate() - return Metabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabase": - """ - Create a :class:`RelatedMetabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabase(guid=self.guid) - return RelatedMetabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -512,9 +453,6 @@ def _metabase_to_nested(metabase: Metabase) -> MetabaseNested: is_incomplete=metabase.is_incomplete, provenance_type=metabase.provenance_type, home_id=metabase.home_id, - depth=metabase.depth, - immediate_upstream=metabase.immediate_upstream, - immediate_downstream=metabase.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -546,6 +484,7 @@ def _metabase_from_nested(nested: MetabaseNested) -> Metabase: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -554,9 +493,6 @@ def _metabase_from_nested(nested: MetabaseNested) -> Metabase: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/metabase_collection.py b/pyatlan_v9/model/assets/metabase_collection.py index fe3941dda..8b4a98914 100644 --- a/pyatlan_v9/model/assets/metabase_collection.py +++ b/pyatlan_v9/model/assets/metabase_collection.py @@ -40,11 +40,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - 
RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseDashboard, RelatedMetabaseQuestion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -103,6 +99,8 @@ class MetabaseCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseCollection" + metabase_slug: Union[str, None, UnsetType] = UNSET """""" @@ -222,66 +220,6 @@ class MetabaseCollection(Asset): def __post_init__(self) -> None: self.type_name = "MetabaseCollection" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MetabaseCollection validation failed: {errors}") - - def minimize(self) -> "MetabaseCollection": - """ - Return a minimal copy of this MetabaseCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseCollection instance with only the minimum required fields. - """ - self.validate() - return MetabaseCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseCollection": - """ - Create a :class:`RelatedMetabaseCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseCollection(guid=self.guid) - return RelatedMetabaseCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -578,9 +516,6 @@ def _metabase_collection_to_nested( is_incomplete=metabase_collection.is_incomplete, provenance_type=metabase_collection.provenance_type, home_id=metabase_collection.home_id, - depth=metabase_collection.depth, - immediate_upstream=metabase_collection.immediate_upstream, - immediate_downstream=metabase_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -616,6 +551,7 @@ def _metabase_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -624,9 +560,6 @@ def _metabase_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/metabase_dashboard.py b/pyatlan_v9/model/assets/metabase_dashboard.py index 3a2c16b5b..a075e2244 100644 --- a/pyatlan_v9/model/assets/metabase_dashboard.py +++ b/pyatlan_v9/model/assets/metabase_dashboard.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - 
RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseCollection, RelatedMetabaseQuestion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -101,6 +97,8 @@ class MetabaseDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseDashboard" + metabase_question_count: Union[int, None, UnsetType] = UNSET """""" @@ -217,78 +215,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.metabase_collection is UNSET: - errors.append("metabase_collection is required for creation") - if self.metabase_collection_name is UNSET: - errors.append("metabase_collection_name is required for creation") - if self.metabase_collection_qualified_name is UNSET: - errors.append( - "metabase_collection_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MetabaseDashboard validation failed: {errors}") - - def minimize(self) -> "MetabaseDashboard": - """ - Return a minimal copy of this MetabaseDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseDashboard instance with only the minimum required fields. - """ - self.validate() - return MetabaseDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseDashboard": - """ - Create a :class:`RelatedMetabaseDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseDashboard(guid=self.guid) - return RelatedMetabaseDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -570,9 +496,6 @@ def _metabase_dashboard_to_nested( is_incomplete=metabase_dashboard.is_incomplete, provenance_type=metabase_dashboard.provenance_type, home_id=metabase_dashboard.home_id, - depth=metabase_dashboard.depth, - immediate_upstream=metabase_dashboard.immediate_upstream, - immediate_downstream=metabase_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +531,7 @@ def _metabase_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +540,6 @@ def _metabase_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/metabase_question.py b/pyatlan_v9/model/assets/metabase_question.py index 35dcce1e1..5be411e62 100644 --- a/pyatlan_v9/model/assets/metabase_question.py +++ b/pyatlan_v9/model/assets/metabase_question.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - 
RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseCollection, RelatedMetabaseDashboard from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -103,6 +99,8 @@ class MetabaseQuestion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseQuestion" + metabase_dashboard_count: Union[int, None, UnsetType] = UNSET """""" @@ -225,78 +223,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseQuestion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.metabase_collection is UNSET: - errors.append("metabase_collection is required for creation") - if self.metabase_collection_name is UNSET: - errors.append("metabase_collection_name is required for creation") - if self.metabase_collection_qualified_name is UNSET: - errors.append( - "metabase_collection_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MetabaseQuestion validation failed: {errors}") - - def minimize(self) -> "MetabaseQuestion": - """ - Return a minimal copy of this MetabaseQuestion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseQuestion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseQuestion instance with only the minimum required fields. - """ - self.validate() - return MetabaseQuestion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseQuestion": - """ - Create a :class:`RelatedMetabaseQuestion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseQuestion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseQuestion(guid=self.guid) - return RelatedMetabaseQuestion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -588,9 +514,6 @@ def _metabase_question_to_nested( is_incomplete=metabase_question.is_incomplete, provenance_type=metabase_question.provenance_type, home_id=metabase_question.home_id, - depth=metabase_question.depth, - immediate_upstream=metabase_question.immediate_upstream, - immediate_downstream=metabase_question.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +547,7 @@ def _metabase_question_from_nested(nested: MetabaseQuestionNested) -> MetabaseQu updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +556,6 @@ def _metabase_question_from_nested(nested: MetabaseQuestionNested) -> MetabaseQu is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_question_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/metric.py b/pyatlan_v9/model/assets/metric.py index a90455bbc..1f3bfb3d2 100644 --- a/pyatlan_v9/model/assets/metric.py +++ b/pyatlan_v9/model/assets/metric.py @@ -101,6 +101,8 @@ class Metric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Metric" + metric_type: Union[str, None, 
UnsetType] = UNSET """Type of the metric.""" @@ -222,66 +224,6 @@ class Metric(Asset): def __post_init__(self) -> None: self.type_name = "Metric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Metric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Metric validation failed: {errors}") - - def minimize(self) -> "Metric": - """ - Return a minimal copy of this Metric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Metric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Metric instance with only the minimum required fields. - """ - self.validate() - return Metric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetric": - """ - Create a :class:`RelatedMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMetric(guid=self.guid) - return RelatedMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -567,9 +509,6 @@ def _metric_to_nested(metric: Metric) -> MetricNested: is_incomplete=metric.is_incomplete, provenance_type=metric.provenance_type, home_id=metric.home_id, - depth=metric.depth, - immediate_upstream=metric.immediate_upstream, - immediate_downstream=metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -599,6 +538,7 @@ def _metric_from_nested(nested: MetricNested) -> Metric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -607,9 +547,6 @@ def _metric_from_nested(nested: MetricNested) -> Metric: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy.py b/pyatlan_v9/model/assets/micro_strategy.py index 27326e588..81190df6c 100644 --- a/pyatlan_v9/model/assets/micro_strategy.py +++ b/pyatlan_v9/model/assets/micro_strategy.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm -from .micro_strategy_related import RelatedMicroStrategy from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -101,6 +100,8 @@ class MicroStrategy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategy" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -226,66 +227,6 @@ class MicroStrategy(Asset): def __post_init__(self) -> None: self.type_name = "MicroStrategy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MicroStrategy validation failed: {errors}") - - def minimize(self) -> "MicroStrategy": - """ - Return a minimal copy of this MicroStrategy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategy instance with only the minimum required fields. - """ - self.validate() - return MicroStrategy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategy": - """ - Create a :class:`RelatedMicroStrategy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategy reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategy(guid=self.guid) - return RelatedMicroStrategy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -596,9 +537,6 @@ def _micro_strategy_to_nested(micro_strategy: MicroStrategy) -> MicroStrategyNes is_incomplete=micro_strategy.is_incomplete, provenance_type=micro_strategy.provenance_type, home_id=micro_strategy.home_id, - depth=micro_strategy.depth, - immediate_upstream=micro_strategy.immediate_upstream, - immediate_downstream=micro_strategy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -632,6 +570,7 @@ def _micro_strategy_from_nested(nested: MicroStrategyNested) -> MicroStrategy: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -640,9 +579,6 @@ def _micro_strategy_from_nested(nested: MicroStrategyNested) -> MicroStrategy: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_attribute.py b/pyatlan_v9/model/assets/micro_strategy_attribute.py index 23fb5bdd5..421de6d5c 100644 --- a/pyatlan_v9/model/assets/micro_strategy_attribute.py +++ b/pyatlan_v9/model/assets/micro_strategy_attribute.py @@ -42,7 +42,6 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm from .micro_strategy_related import ( - RelatedMicroStrategyAttribute, RelatedMicroStrategyColumn, RelatedMicroStrategyCube, RelatedMicroStrategyMetric, @@ -115,6 +114,8 @@ class MicroStrategyAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyAttribute" + micro_strategy_attribute_forms: Union[str, None, UnsetType] = UNSET """JSON string specifying the attribute's name, description, displayFormat, etc.""" @@ -270,80 +271,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyAttribute validation failed: {errors}") - - def minimize(self) -> "MicroStrategyAttribute": - """ - Return a minimal copy of this MicroStrategyAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyAttribute instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyAttribute( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMicroStrategyAttribute": - """ - Create a :class:`RelatedMicroStrategyAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyAttribute reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyAttribute(guid=self.guid) - return RelatedMicroStrategyAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -693,9 +620,6 @@ def _micro_strategy_attribute_to_nested( is_incomplete=micro_strategy_attribute.is_incomplete, provenance_type=micro_strategy_attribute.provenance_type, home_id=micro_strategy_attribute.home_id, - depth=micro_strategy_attribute.depth, - immediate_upstream=micro_strategy_attribute.immediate_upstream, - immediate_downstream=micro_strategy_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -731,6 +655,7 @@ def _micro_strategy_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -739,9 +664,6 @@ def _micro_strategy_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_column.py b/pyatlan_v9/model/assets/micro_strategy_column.py index 3216e4e5a..97db9c6f6 100644 --- a/pyatlan_v9/model/assets/micro_strategy_column.py +++ b/pyatlan_v9/model/assets/micro_strategy_column.py @@ -43,7 +43,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from 
.micro_strategy_related import ( RelatedMicroStrategyAttribute, - RelatedMicroStrategyColumn, RelatedMicroStrategyCube, RelatedMicroStrategyDocument, RelatedMicroStrategyDossier, @@ -130,6 +129,8 @@ class MicroStrategyColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyColumn" + micro_strategy_column_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the column in MicroStrategy.""" @@ -324,78 +325,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_attribute is UNSET: - errors.append("micro_strategy_attribute is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyColumn validation failed: {errors}") - - def minimize(self) -> "MicroStrategyColumn": - """ - Return a minimal copy of this MicroStrategyColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyColumn instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyColumn": - """ - Create a :class:`RelatedMicroStrategyColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyColumn(guid=self.guid) - return RelatedMicroStrategyColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -832,9 +761,6 @@ def _micro_strategy_column_to_nested( is_incomplete=micro_strategy_column.is_incomplete, provenance_type=micro_strategy_column.provenance_type, home_id=micro_strategy_column.home_id, - depth=micro_strategy_column.depth, - immediate_upstream=micro_strategy_column.immediate_upstream, - immediate_downstream=micro_strategy_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -870,6 +796,7 @@ def _micro_strategy_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -878,9 +805,6 @@ def _micro_strategy_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_cube.py b/pyatlan_v9/model/assets/micro_strategy_cube.py index fe9f93568..f8325efe8 100644 --- a/pyatlan_v9/model/assets/micro_strategy_cube.py +++ b/pyatlan_v9/model/assets/micro_strategy_cube.py @@ -44,7 +44,6 @@ from .micro_strategy_related import ( RelatedMicroStrategyAttribute, RelatedMicroStrategyColumn, - RelatedMicroStrategyCube, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, ) @@ 
-114,6 +113,8 @@ class MicroStrategyCube(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyCube" + micro_strategy_cube_type: Union[str, None, UnsetType] = UNSET """Type of cube, for example: OLAP or MTDI.""" @@ -269,78 +270,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyCube instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyCube validation failed: {errors}") - - def minimize(self) -> "MicroStrategyCube": - """ - Return a minimal copy of this MicroStrategyCube with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyCube with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyCube instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyCube(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyCube": - """ - Create a :class:`RelatedMicroStrategyCube` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyCube reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyCube(guid=self.guid) - return RelatedMicroStrategyCube(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -689,9 +618,6 @@ def _micro_strategy_cube_to_nested( is_incomplete=micro_strategy_cube.is_incomplete, provenance_type=micro_strategy_cube.provenance_type, home_id=micro_strategy_cube.home_id, - depth=micro_strategy_cube.depth, - immediate_upstream=micro_strategy_cube.immediate_upstream, - immediate_downstream=micro_strategy_cube.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -727,6 +653,7 @@ def _micro_strategy_cube_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -735,9 +662,6 @@ def _micro_strategy_cube_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_cube_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_document.py b/pyatlan_v9/model/assets/micro_strategy_document.py index 128b5b016..2c593e29a 100644 --- a/pyatlan_v9/model/assets/micro_strategy_document.py +++ b/pyatlan_v9/model/assets/micro_strategy_document.py @@ -43,7 +43,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyColumn, - RelatedMicroStrategyDocument, RelatedMicroStrategyProject, ) from 
.model_related import RelatedModelAttribute, RelatedModelEntity @@ -108,6 +107,8 @@ class MicroStrategyDocument(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyDocument" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -247,78 +248,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyDocument instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyDocument validation failed: {errors}") - - def minimize(self) -> "MicroStrategyDocument": - """ - Return a minimal copy of this MicroStrategyDocument with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyDocument with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyDocument instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyDocument(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyDocument": - """ - Create a :class:`RelatedMicroStrategyDocument` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyDocument reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyDocument(guid=self.guid) - return RelatedMicroStrategyDocument(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -647,9 +576,6 @@ def _micro_strategy_document_to_nested( is_incomplete=micro_strategy_document.is_incomplete, provenance_type=micro_strategy_document.provenance_type, home_id=micro_strategy_document.home_id, - depth=micro_strategy_document.depth, - immediate_upstream=micro_strategy_document.immediate_upstream, - immediate_downstream=micro_strategy_document.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -685,6 +611,7 @@ def _micro_strategy_document_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -693,9 +620,6 @@ def _micro_strategy_document_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_document_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_dossier.py b/pyatlan_v9/model/assets/micro_strategy_dossier.py index d3b5b05a0..776601b38 100644 --- a/pyatlan_v9/model/assets/micro_strategy_dossier.py +++ b/pyatlan_v9/model/assets/micro_strategy_dossier.py @@ -43,7 +43,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related 
import ( RelatedMicroStrategyColumn, - RelatedMicroStrategyDossier, RelatedMicroStrategyProject, RelatedMicroStrategyVisualization, ) @@ -111,6 +110,8 @@ class MicroStrategyDossier(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyDossier" + micro_strategy_dossier_chapter_names: Union[List[str], None, UnsetType] = UNSET """List of chapter names in this dossier.""" @@ -258,78 +259,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyDossier instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyDossier validation failed: {errors}") - - def minimize(self) -> "MicroStrategyDossier": - """ - Return a minimal copy of this MicroStrategyDossier with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyDossier with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyDossier instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyDossier(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyDossier": - """ - Create a :class:`RelatedMicroStrategyDossier` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyDossier reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyDossier(guid=self.guid) - return RelatedMicroStrategyDossier(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -673,9 +602,6 @@ def _micro_strategy_dossier_to_nested( is_incomplete=micro_strategy_dossier.is_incomplete, provenance_type=micro_strategy_dossier.provenance_type, home_id=micro_strategy_dossier.home_id, - depth=micro_strategy_dossier.depth, - immediate_upstream=micro_strategy_dossier.immediate_upstream, - immediate_downstream=micro_strategy_dossier.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -711,6 +637,7 @@ def _micro_strategy_dossier_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -719,9 +646,6 @@ def _micro_strategy_dossier_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_dossier_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_fact.py b/pyatlan_v9/model/assets/micro_strategy_fact.py index 0614aa3f4..2f28ec63c 100644 --- a/pyatlan_v9/model/assets/micro_strategy_fact.py +++ b/pyatlan_v9/model/assets/micro_strategy_fact.py @@ -43,7 +43,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( 
RelatedMicroStrategyColumn, - RelatedMicroStrategyFact, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, ) @@ -111,6 +110,8 @@ class MicroStrategyFact(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyFact" + micro_strategy_fact_expressions: Union[List[str], None, UnsetType] = UNSET """List of expressions for this fact.""" @@ -258,78 +259,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyFact instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyFact validation failed: {errors}") - - def minimize(self) -> "MicroStrategyFact": - """ - Return a minimal copy of this MicroStrategyFact with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyFact with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyFact instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyFact(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyFact": - """ - Create a :class:`RelatedMicroStrategyFact` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyFact reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyFact(guid=self.guid) - return RelatedMicroStrategyFact(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -667,9 +596,6 @@ def _micro_strategy_fact_to_nested( is_incomplete=micro_strategy_fact.is_incomplete, provenance_type=micro_strategy_fact.provenance_type, home_id=micro_strategy_fact.home_id, - depth=micro_strategy_fact.depth, - immediate_upstream=micro_strategy_fact.immediate_upstream, - immediate_downstream=micro_strategy_fact.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -705,6 +631,7 @@ def _micro_strategy_fact_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -713,9 +640,6 @@ def _micro_strategy_fact_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_fact_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_metric.py b/pyatlan_v9/model/assets/micro_strategy_metric.py index 3c7d0be05..373b9aab7 100644 --- a/pyatlan_v9/model/assets/micro_strategy_metric.py +++ b/pyatlan_v9/model/assets/micro_strategy_metric.py @@ -125,6 +125,8 @@ class MicroStrategyMetric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyMetric" + 
micro_strategy_metric_expression: Union[str, None, UnsetType] = UNSET """Text specifiying this metric's expression.""" @@ -313,78 +315,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyMetric validation failed: {errors}") - - def 
minimize(self) -> "MicroStrategyMetric": - """ - Return a minimal copy of this MicroStrategyMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyMetric instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyMetric": - """ - Create a :class:`RelatedMicroStrategyMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyMetric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyMetric(guid=self.guid) - return RelatedMicroStrategyMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -792,9 +722,6 @@ def _micro_strategy_metric_to_nested( is_incomplete=micro_strategy_metric.is_incomplete, provenance_type=micro_strategy_metric.provenance_type, home_id=micro_strategy_metric.home_id, - depth=micro_strategy_metric.depth, - immediate_upstream=micro_strategy_metric.immediate_upstream, - immediate_downstream=micro_strategy_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -830,6 +757,7 @@ def _micro_strategy_metric_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -838,9 +766,6 @@ def _micro_strategy_metric_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_metric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_project.py b/pyatlan_v9/model/assets/micro_strategy_project.py index 290f3b282..3230c5af6 100644 --- a/pyatlan_v9/model/assets/micro_strategy_project.py +++ b/pyatlan_v9/model/assets/micro_strategy_project.py @@ -47,7 +47,6 @@ RelatedMicroStrategyDossier, RelatedMicroStrategyFact, RelatedMicroStrategyMetric, - RelatedMicroStrategyProject, RelatedMicroStrategyReport, RelatedMicroStrategyVisualization, ) @@ -119,6 +118,8 @@ class MicroStrategyProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyProject" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -280,66 +281,6 @@ class MicroStrategyProject(Asset): def __post_init__(self) -> None: self.type_name = "MicroStrategyProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MicroStrategyProject validation failed: {errors}") - - def minimize(self) -> "MicroStrategyProject": - """ - Return a minimal copy of this MicroStrategyProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyProject instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyProject": - """ - Create a :class:`RelatedMicroStrategyProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyProject(guid=self.guid) - return RelatedMicroStrategyProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -702,9 +643,6 @@ def _micro_strategy_project_to_nested( is_incomplete=micro_strategy_project.is_incomplete, provenance_type=micro_strategy_project.provenance_type, home_id=micro_strategy_project.home_id, - depth=micro_strategy_project.depth, - immediate_upstream=micro_strategy_project.immediate_upstream, - immediate_downstream=micro_strategy_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -740,6 +678,7 @@ def _micro_strategy_project_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -748,9 +687,6 @@ def _micro_strategy_project_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_report.py b/pyatlan_v9/model/assets/micro_strategy_report.py index e11802697..893f3e5d2 100644 --- a/pyatlan_v9/model/assets/micro_strategy_report.py +++ b/pyatlan_v9/model/assets/micro_strategy_report.py @@ -46,7 +46,6 @@ RelatedMicroStrategyColumn, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, - RelatedMicroStrategyReport, ) from .model_related import RelatedModelAttribute, 
RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -113,6 +112,8 @@ class MicroStrategyReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyReport" + micro_strategy_report_type: Union[str, None, UnsetType] = UNSET """Type of report, for example: Grid or Chart.""" @@ -265,78 +266,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyReport validation failed: {errors}") - - def minimize(self) -> "MicroStrategyReport": - """ - Return a minimal copy of this MicroStrategyReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyReport instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyReport": - """ - Create a :class:`RelatedMicroStrategyReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyReport(guid=self.guid) - return RelatedMicroStrategyReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -680,9 +609,6 @@ def _micro_strategy_report_to_nested( is_incomplete=micro_strategy_report.is_incomplete, provenance_type=micro_strategy_report.provenance_type, home_id=micro_strategy_report.home_id, - depth=micro_strategy_report.depth, - immediate_upstream=micro_strategy_report.immediate_upstream, - immediate_downstream=micro_strategy_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -718,6 +644,7 @@ def _micro_strategy_report_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -726,9 +653,6 @@ def _micro_strategy_report_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/micro_strategy_visualization.py b/pyatlan_v9/model/assets/micro_strategy_visualization.py index 0ec0d61e7..a51a96a70 100644 --- a/pyatlan_v9/model/assets/micro_strategy_visualization.py +++ b/pyatlan_v9/model/assets/micro_strategy_visualization.py @@ -44,7 +44,6 @@ from .micro_strategy_related import ( RelatedMicroStrategyDossier, RelatedMicroStrategyProject, - RelatedMicroStrategyVisualization, ) from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -111,6 +110,8 @@ class MicroStrategyVisualization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyVisualization" + micro_strategy_visualization_type: Union[str, None, UnsetType] = UNSET """Type of visualization.""" @@ -257,80 +258,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyVisualization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyVisualization validation failed: {errors}") - - def minimize(self) -> "MicroStrategyVisualization": - """ - Return a minimal copy of this MicroStrategyVisualization with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyVisualization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyVisualization instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyVisualization( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMicroStrategyVisualization": - """ - Create a :class:`RelatedMicroStrategyVisualization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyVisualization reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyVisualization(guid=self.guid) - return RelatedMicroStrategyVisualization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -678,9 +605,6 @@ def _micro_strategy_visualization_to_nested( is_incomplete=micro_strategy_visualization.is_incomplete, provenance_type=micro_strategy_visualization.provenance_type, home_id=micro_strategy_visualization.home_id, - depth=micro_strategy_visualization.depth, - immediate_upstream=micro_strategy_visualization.immediate_upstream, - immediate_downstream=micro_strategy_visualization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -716,6 +640,7 @@ def _micro_strategy_visualization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -724,9 +649,6 @@ def _micro_strategy_visualization_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_visualization_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mode.py b/pyatlan_v9/model/assets/mode.py index edd396d24..9b37c70d2 100644 --- a/pyatlan_v9/model/assets/mode.py +++ b/pyatlan_v9/model/assets/mode.py @@ -40,7 +40,6 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedMode from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -100,6 +99,8 @@ class Mode(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Mode" + mode_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the Mode asset.""" @@ -222,66 +223,6 @@ class Mode(Asset): def __post_init__(self) -> None: self.type_name = "Mode" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Mode instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Mode validation failed: {errors}") - - def minimize(self) -> "Mode": - """ - Return a minimal copy of this Mode with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Mode with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Mode instance with only the minimum required fields. - """ - self.validate() - return Mode(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMode": - """ - Create a :class:`RelatedMode` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMode reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMode(guid=self.guid) - return RelatedMode(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -569,9 +510,6 @@ def _mode_to_nested(mode: Mode) -> ModeNested: is_incomplete=mode.is_incomplete, provenance_type=mode.provenance_type, home_id=mode.home_id, - depth=mode.depth, - immediate_upstream=mode.immediate_upstream, - immediate_downstream=mode.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -601,6 +539,7 @@ def _mode_from_nested(nested: ModeNested) -> Mode: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -609,9 +548,6 @@ def _mode_from_nested(nested: ModeNested) -> Mode: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_mode_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mode_chart.py b/pyatlan_v9/model/assets/mode_chart.py index 95afce3c9..956fe6cef 100644 --- a/pyatlan_v9/model/assets/mode_chart.py +++ b/pyatlan_v9/model/assets/mode_chart.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeChart, RelatedModeQuery +from .mode_related import RelatedModeQuery from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -103,6 +103,8 @@ class ModeChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeChart" + mode_chart_type: Union[str, None, UnsetType] = UNSET """Type of chart.""" @@ -239,84 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_query is UNSET: - errors.append("mode_query is required for creation") - if self.mode_query_name is UNSET: - errors.append("mode_query_name is required for creation") - if self.mode_query_qualified_name is UNSET: - errors.append("mode_query_qualified_name is required for creation") - if self.mode_report_name is UNSET: - errors.append("mode_report_name is required for creation") - if self.mode_report_qualified_name is UNSET: - errors.append("mode_report_qualified_name is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeChart validation failed: {errors}") - - def minimize(self) -> "ModeChart": - """ - Return a minimal copy of this ModeChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeChart instance with only the minimum required fields. 
- """ - self.validate() - return ModeChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeChart": - """ - Create a :class:`RelatedModeChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeChart reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModeChart(guid=self.guid) - return RelatedModeChart(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -617,9 +541,6 @@ def _mode_chart_to_nested(mode_chart: ModeChart) -> ModeChartNested: is_incomplete=mode_chart.is_incomplete, provenance_type=mode_chart.provenance_type, home_id=mode_chart.home_id, - depth=mode_chart.depth, - immediate_upstream=mode_chart.immediate_upstream, - immediate_downstream=mode_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -651,6 +572,7 @@ def _mode_chart_from_nested(nested: ModeChartNested) -> ModeChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -659,9 +581,6 @@ def _mode_chart_from_nested(nested: ModeChartNested) -> ModeChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_chart_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/mode_collection.py b/pyatlan_v9/model/assets/mode_collection.py index 8cef22be1..3b1726e50 100644 --- a/pyatlan_v9/model/assets/mode_collection.py +++ b/pyatlan_v9/model/assets/mode_collection.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeCollection, RelatedModeReport, RelatedModeWorkspace +from .mode_related import RelatedModeReport, RelatedModeWorkspace from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -105,6 +105,8 @@ class ModeCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeCollection" + mode_collection_type: Union[str, None, UnsetType] = UNSET """Type of this collection.""" @@ -245,76 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_workspace is UNSET: - errors.append("mode_workspace is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeCollection validation failed: {errors}") - - def minimize(self) -> "ModeCollection": - """ - Return a minimal copy of this ModeCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeCollection instance with only the minimum required fields. - """ - self.validate() - return ModeCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeCollection": - """ - Create a :class:`RelatedModeCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeCollection(guid=self.guid) - return RelatedModeCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -630,9 +562,6 @@ def _mode_collection_to_nested(mode_collection: ModeCollection) -> ModeCollectio is_incomplete=mode_collection.is_incomplete, provenance_type=mode_collection.provenance_type, home_id=mode_collection.home_id, - depth=mode_collection.depth, - immediate_upstream=mode_collection.immediate_upstream, - immediate_downstream=mode_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -666,6 +595,7 @@ def _mode_collection_from_nested(nested: ModeCollectionNested) -> ModeCollection updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -674,9 +604,6 @@ def _mode_collection_from_nested(nested: ModeCollectionNested) -> ModeCollection is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_collection_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mode_query.py b/pyatlan_v9/model/assets/mode_query.py index e6b6530e1..b274523b7 100644 --- a/pyatlan_v9/model/assets/mode_query.py +++ b/pyatlan_v9/model/assets/mode_query.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import 
RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeChart, RelatedModeQuery, RelatedModeReport +from .mode_related import RelatedModeChart, RelatedModeReport from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -105,6 +105,8 @@ class ModeQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeQuery" + mode_raw_query: Union[str, None, UnsetType] = UNSET """Raw query for the Mode asset.""" @@ -247,80 +249,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_report is UNSET: - errors.append("mode_report is required for creation") - if self.mode_report_name is UNSET: - errors.append("mode_report_name is required for creation") - if self.mode_report_qualified_name is UNSET: - errors.append("mode_report_qualified_name is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeQuery validation failed: {errors}") - - def minimize(self) -> "ModeQuery": - """ - Return a minimal copy of this ModeQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeQuery instance with only the minimum required fields. - """ - self.validate() - return ModeQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeQuery": - """ - Create a :class:`RelatedModeQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedModeQuery reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModeQuery(guid=self.guid) - return RelatedModeQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -630,9 +558,6 @@ def _mode_query_to_nested(mode_query: ModeQuery) -> ModeQueryNested: is_incomplete=mode_query.is_incomplete, provenance_type=mode_query.provenance_type, home_id=mode_query.home_id, - depth=mode_query.depth, - immediate_upstream=mode_query.immediate_upstream, - immediate_downstream=mode_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -664,6 +589,7 @@ def _mode_query_from_nested(nested: ModeQueryNested) -> ModeQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -672,9 +598,6 @@ def _mode_query_from_nested(nested: ModeQueryNested) -> ModeQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_query_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mode_report.py b/pyatlan_v9/model/assets/mode_report.py index cb9c4ae96..665e1220e 100644 --- a/pyatlan_v9/model/assets/mode_report.py +++ b/pyatlan_v9/model/assets/mode_report.py @@ -41,7 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm 
-from .mode_related import RelatedModeCollection, RelatedModeQuery, RelatedModeReport +from .mode_related import RelatedModeCollection, RelatedModeQuery from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -111,6 +111,8 @@ class ModeReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeReport" + mode_collection_token: Union[str, None, UnsetType] = UNSET """Token for the Mode collection.""" @@ -271,76 +273,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_collections is UNSET: - errors.append("mode_collections is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeReport validation failed: {errors}") - - def minimize(self) -> "ModeReport": - """ - Return a minimal copy of this ModeReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeReport instance with only the minimum required fields. - """ - self.validate() - return ModeReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeReport": - """ - Create a :class:`RelatedModeReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeReport(guid=self.guid) - return RelatedModeReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -680,9 +612,6 @@ def _mode_report_to_nested(mode_report: ModeReport) -> ModeReportNested: is_incomplete=mode_report.is_incomplete, provenance_type=mode_report.provenance_type, home_id=mode_report.home_id, - depth=mode_report.depth, - immediate_upstream=mode_report.immediate_upstream, - immediate_downstream=mode_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -714,6 +643,7 @@ def _mode_report_from_nested(nested: ModeReportNested) -> ModeReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -722,9 +652,6 @@ def _mode_report_from_nested(nested: ModeReportNested) -> ModeReport: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mode_workspace.py b/pyatlan_v9/model/assets/mode_workspace.py index 6439dae84..7593cf195 100644 --- a/pyatlan_v9/model/assets/mode_workspace.py +++ b/pyatlan_v9/model/assets/mode_workspace.py @@ -40,7 +40,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import 
RelatedModeCollection, RelatedModeWorkspace +from .mode_related import RelatedModeCollection from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -102,6 +102,8 @@ class ModeWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeWorkspace" + mode_collection_count: Union[int, None, UnsetType] = UNSET """Number of collections in this workspace.""" @@ -230,66 +232,6 @@ class ModeWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "ModeWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModeWorkspace validation failed: {errors}") - - def minimize(self) -> "ModeWorkspace": - """ - Return a minimal copy of this ModeWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeWorkspace instance with only the minimum required fields. - """ - self.validate() - return ModeWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeWorkspace": - """ - Create a :class:`RelatedModeWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeWorkspace(guid=self.guid) - return RelatedModeWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -594,9 +536,6 @@ def _mode_workspace_to_nested(mode_workspace: ModeWorkspace) -> ModeWorkspaceNes is_incomplete=mode_workspace.is_incomplete, provenance_type=mode_workspace.provenance_type, home_id=mode_workspace.home_id, - depth=mode_workspace.depth, - immediate_upstream=mode_workspace.immediate_upstream, - immediate_downstream=mode_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -630,6 +569,7 @@ def _mode_workspace_from_nested(nested: ModeWorkspaceNested) -> ModeWorkspace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -638,9 +578,6 @@ def _mode_workspace_from_nested(nested: ModeWorkspaceNested) -> ModeWorkspace: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model.py b/pyatlan_v9/model/assets/model.py index 8906dc49d..d92a66b2f 100644 --- a/pyatlan_v9/model/assets/model.py +++ b/pyatlan_v9/model/assets/model.py @@ -40,7 +40,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from 
.model_related import RelatedModel, RelatedModelAttribute, RelatedModelEntity +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -104,6 +104,8 @@ class Model(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Model" + model_name: Union[str, None, UnsetType] = UNSET """Simple name of the model in which this asset exists, or empty if it is itself a data model.""" @@ -241,66 +243,6 @@ class Model(Asset): def __post_init__(self) -> None: self.type_name = "Model" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Model instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Model validation failed: {errors}") - - def minimize(self) -> "Model": - """ - Return a minimal copy of this Model with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Model with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Model instance with only the minimum required fields. - """ - self.validate() - return Model(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModel": - """ - Create a :class:`RelatedModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModel(guid=self.guid) - return RelatedModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -621,9 +563,6 @@ def _model_to_nested(model: Model) -> ModelNested: is_incomplete=model.is_incomplete, provenance_type=model.provenance_type, home_id=model.home_id, - depth=model.depth, - immediate_upstream=model.immediate_upstream, - immediate_downstream=model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -653,6 +592,7 @@ def _model_from_nested(nested: ModelNested) -> Model: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -661,9 +601,6 @@ def _model_from_nested(nested: ModelNested) -> Model: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_attribute.py b/pyatlan_v9/model/assets/model_attribute.py index 6bbf37203..33ed07449 100644 --- a/pyatlan_v9/model/assets/model_attribute.py +++ b/pyatlan_v9/model/assets/model_attribute.py @@ -123,6 +123,8 @@ class ModelAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelAttribute" + model_attribute_is_nullable: Union[bool, None, UnsetType] = UNSET """When true, the values in this attribute can be null.""" @@ 
-312,66 +314,6 @@ class ModelAttribute(Asset): def __post_init__(self) -> None: self.type_name = "ModelAttribute" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModelAttribute validation failed: {errors}") - - def minimize(self) -> "ModelAttribute": - """ - Return a minimal copy of this ModelAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelAttribute instance with only the minimum required fields. - """ - self.validate() - return ModelAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelAttribute": - """ - Create a :class:`RelatedModelAttribute` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelAttribute reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModelAttribute(guid=self.guid) - return RelatedModelAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -774,9 +716,6 @@ def _model_attribute_to_nested(model_attribute: ModelAttribute) -> ModelAttribut is_incomplete=model_attribute.is_incomplete, provenance_type=model_attribute.provenance_type, home_id=model_attribute.home_id, - depth=model_attribute.depth, - immediate_upstream=model_attribute.immediate_upstream, - immediate_downstream=model_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -810,6 +749,7 @@ def _model_attribute_from_nested(nested: ModelAttributeNested) -> ModelAttribute updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -818,9 +758,6 @@ def _model_attribute_from_nested(nested: ModelAttributeNested) -> ModelAttribute is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_attribute_association.py b/pyatlan_v9/model/assets/model_attribute_association.py index 1f9889fc8..2882e2228 100644 --- 
a/pyatlan_v9/model/assets/model_attribute_association.py +++ b/pyatlan_v9/model/assets/model_attribute_association.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm -from .model_related import ( - RelatedModelAttribute, - RelatedModelAttributeAssociation, - RelatedModelEntity, -) +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -114,6 +110,8 @@ class ModelAttributeAssociation(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelAttributeAssociation" + model_attribute_association_to_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the association to which this attribute is related.""" @@ -276,74 +274,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelAttributeAssociation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_attribute_association_to is UNSET: - errors.append("model_attribute_association_to is required for creation") - if errors: - raise ValueError(f"ModelAttributeAssociation validation failed: {errors}") - - def minimize(self) -> "ModelAttributeAssociation": - """ - Return a minimal copy of this ModelAttributeAssociation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelAttributeAssociation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelAttributeAssociation instance with only the minimum required fields. - """ - self.validate() - return ModelAttributeAssociation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedModelAttributeAssociation": - """ - Create a :class:`RelatedModelAttributeAssociation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelAttributeAssociation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelAttributeAssociation(guid=self.guid) - return RelatedModelAttributeAssociation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -715,9 +645,6 @@ def _model_attribute_association_to_nested( is_incomplete=model_attribute_association.is_incomplete, provenance_type=model_attribute_association.provenance_type, home_id=model_attribute_association.home_id, - depth=model_attribute_association.depth, - immediate_upstream=model_attribute_association.immediate_upstream, - immediate_downstream=model_attribute_association.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -753,6 +680,7 @@ def _model_attribute_association_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -761,9 +689,6 @@ def _model_attribute_association_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_attribute_association_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_data_model.py b/pyatlan_v9/model/assets/model_data_model.py index 95486f512..37ed8712a 100644 --- a/pyatlan_v9/model/assets/model_data_model.py +++ b/pyatlan_v9/model/assets/model_data_model.py @@ -42,7 +42,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import ( RelatedModelAttribute, - RelatedModelDataModel, 
RelatedModelEntity, RelatedModelVersion, ) @@ -112,6 +111,8 @@ class ModelDataModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelDataModel" + model_version_count: Union[int, None, UnsetType] = UNSET """Number of versions of the data model.""" @@ -258,66 +259,6 @@ class ModelDataModel(Asset): def __post_init__(self) -> None: self.type_name = "ModelDataModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelDataModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModelDataModel validation failed: {errors}") - - def minimize(self) -> "ModelDataModel": - """ - Return a minimal copy of this ModelDataModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelDataModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new ModelDataModel instance with only the minimum required fields. - """ - self.validate() - return ModelDataModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelDataModel": - """ - Create a :class:`RelatedModelDataModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelDataModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModelDataModel(guid=self.guid) - return RelatedModelDataModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -660,9 +601,6 @@ def _model_data_model_to_nested( is_incomplete=model_data_model.is_incomplete, provenance_type=model_data_model.provenance_type, home_id=model_data_model.home_id, - depth=model_data_model.depth, - immediate_upstream=model_data_model.immediate_upstream, - immediate_downstream=model_data_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -696,6 +634,7 @@ def _model_data_model_from_nested(nested: ModelDataModelNested) -> ModelDataMode updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -704,9 +643,6 @@ def _model_data_model_from_nested(nested: ModelDataModelNested) -> ModelDataMode is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_model_data_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_entity.py b/pyatlan_v9/model/assets/model_entity.py index f4a015df6..5480369ea 100644 --- a/pyatlan_v9/model/assets/model_entity.py +++ b/pyatlan_v9/model/assets/model_entity.py @@ -124,6 +124,8 @@ class ModelEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelEntity" + model_entity_attribute_count: Union[int, None, UnsetType] = UNSET """Number of attributes in the entity.""" @@ -320,70 +322,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"ModelEntity validation failed: {errors}") - - def minimize(self) -> "ModelEntity": - """ - Return a minimal copy of this ModelEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelEntity instance with only the minimum required fields. - """ - self.validate() - return ModelEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelEntity": - """ - Create a :class:`RelatedModelEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelEntity(guid=self.guid) - return RelatedModelEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -780,9 +718,6 @@ def _model_entity_to_nested(model_entity: ModelEntity) -> ModelEntityNested: is_incomplete=model_entity.is_incomplete, provenance_type=model_entity.provenance_type, home_id=model_entity.home_id, - depth=model_entity.depth, - immediate_upstream=model_entity.immediate_upstream, - immediate_downstream=model_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -814,6 +749,7 @@ def _model_entity_from_nested(nested: ModelEntityNested) -> ModelEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -822,9 +758,6 @@ def _model_entity_from_nested(nested: ModelEntityNested) -> ModelEntity: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_entity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_entity_association.py b/pyatlan_v9/model/assets/model_entity_association.py index 7a431ac9c..a4801402d 100644 --- a/pyatlan_v9/model/assets/model_entity_association.py +++ b/pyatlan_v9/model/assets/model_entity_association.py @@ -41,11 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related 
import RelatedAtlasGlossaryTerm -from .model_related import ( - RelatedModelAttribute, - RelatedModelEntity, - RelatedModelEntityAssociation, -) +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -121,6 +117,8 @@ class ModelEntityAssociation(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelEntityAssociation" + model_entity_association_cardinality: Union[str, None, UnsetType] = UNSET """(Deprecated) Cardinality of the data entity association.""" @@ -302,78 +300,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelEntityAssociation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_entity_association_to is UNSET: - errors.append("model_entity_association_to is required for creation") - if self.model_entity_name is UNSET: - errors.append("model_entity_name is required for creation") - if self.model_entity_qualified_name is UNSET: - errors.append("model_entity_qualified_name is required for creation") - if errors: - raise ValueError(f"ModelEntityAssociation validation failed: {errors}") - - def minimize(self) -> "ModelEntityAssociation": - """ - Return a minimal copy of this ModelEntityAssociation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelEntityAssociation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelEntityAssociation instance with only the minimum required fields. - """ - self.validate() - return ModelEntityAssociation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedModelEntityAssociation": - """ - Create a :class:`RelatedModelEntityAssociation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelEntityAssociation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelEntityAssociation(guid=self.guid) - return RelatedModelEntityAssociation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -796,9 +722,6 @@ def _model_entity_association_to_nested( is_incomplete=model_entity_association.is_incomplete, provenance_type=model_entity_association.provenance_type, home_id=model_entity_association.home_id, - depth=model_entity_association.depth, - immediate_upstream=model_entity_association.immediate_upstream, - immediate_downstream=model_entity_association.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -834,6 +757,7 @@ def _model_entity_association_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -842,9 +766,6 @@ def _model_entity_association_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_entity_association_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/model_version.py b/pyatlan_v9/model/assets/model_version.py index 0bcc810f6..a2e3dfee5 100644 --- a/pyatlan_v9/model/assets/model_version.py +++ b/pyatlan_v9/model/assets/model_version.py @@ -45,7 +45,6 @@ RelatedModelAttribute, RelatedModelDataModel, RelatedModelEntity, - RelatedModelVersion, ) from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from 
.partial_related import RelatedPartialField, RelatedPartialObject @@ -113,6 +112,8 @@ class ModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelVersion" + model_version_entity_count: Union[int, None, UnsetType] = UNSET """Number of entities in the version.""" @@ -265,72 +266,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_data_model is UNSET: - errors.append("model_data_model is required for creation") - if errors: - raise ValueError(f"ModelVersion validation failed: {errors}") - - def minimize(self) -> "ModelVersion": - """ - Return a minimal copy of this ModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelVersion instance with only the minimum required fields. - """ - self.validate() - return ModelVersion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelVersion": - """ - Create a :class:`RelatedModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelVersion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelVersion(guid=self.guid) - return RelatedModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -668,9 +603,6 @@ def _model_version_to_nested(model_version: ModelVersion) -> ModelVersionNested: is_incomplete=model_version.is_incomplete, provenance_type=model_version.provenance_type, home_id=model_version.home_id, - depth=model_version.depth, - immediate_upstream=model_version.immediate_upstream, - immediate_downstream=model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -704,6 +636,7 @@ def _model_version_from_nested(nested: ModelVersionNested) -> ModelVersion: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -712,9 +645,6 @@ def _model_version_from_nested(nested: ModelVersionNested) -> ModelVersion: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mongo_db.py b/pyatlan_v9/model/assets/mongo_db.py index f2e7e6803..16f5a4f80 100644 --- a/pyatlan_v9/model/assets/mongo_db.py +++ b/pyatlan_v9/model/assets/mongo_db.py @@ -42,7 +42,6 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, 
RelatedModelEntity -from .mongo_db_related import RelatedMongoDB from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -93,6 +92,8 @@ class MongoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDB" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -193,66 +194,6 @@ class MongoDB(Asset): def __post_init__(self) -> None: self.type_name = "MongoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MongoDB validation failed: {errors}") - - def minimize(self) -> "MongoDB": - """ - Return a minimal copy of this MongoDB with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDB instance with only the minimum required fields. - """ - self.validate() - return MongoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDB": - """ - Create a :class:`RelatedMongoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDB reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMongoDB(guid=self.guid) - return RelatedMongoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +447,6 @@ def _mongo_db_to_nested(mongo_db: MongoDB) -> MongoDBNested: is_incomplete=mongo_db.is_incomplete, provenance_type=mongo_db.provenance_type, home_id=mongo_db.home_id, - depth=mongo_db.depth, - immediate_upstream=mongo_db.immediate_upstream, - immediate_downstream=mongo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -538,6 +476,7 @@ def _mongo_db_from_nested(nested: MongoDBNested) -> MongoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -546,9 +485,6 @@ def _mongo_db_from_nested(nested: MongoDBNested) -> MongoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mongo_db_collection.py b/pyatlan_v9/model/assets/mongo_db_collection.py index c5a3dcf66..8af294aeb 100644 --- a/pyatlan_v9/model/assets/mongo_db_collection.py +++ b/pyatlan_v9/model/assets/mongo_db_collection.py @@ -49,7 +49,7 @@ ) from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .mongo_db_related import RelatedMongoDBCollection, RelatedMongoDBDatabase +from .mongo_db_related import RelatedMongoDBDatabase from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -79,16 +79,16 @@ class MongoDBCollection(Asset): """ MONGO_DB_COLLECTION_SUBTYPE: ClassVar[Any] = None - MONGO_DB_COLLECTION_IS_CAPPED: ClassVar[Any] = None + MONGO_DB_IS_CAPPED: ClassVar[Any] = None MONGO_DB_COLLECTION_TIME_FIELD: ClassVar[Any] = None - MONGO_DB_COLLECTION_TIME_GRANULARITY: ClassVar[Any] = None - MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS: ClassVar[Any] = None - MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT: ClassVar[Any] = None - MONGO_DB_COLLECTION_MAX_SIZE: ClassVar[Any] = None - MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS: ClassVar[Any] = None - MONGO_DB_COLLECTION_NUM_INDEXES: ClassVar[Any] = None - MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE: ClassVar[Any] = None - MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE: ClassVar[Any] = None + MONGO_DB_TIME_GRANULARITY: ClassVar[Any] = None + MONGO_DB_EXPIRE_AFTER_SECONDS: ClassVar[Any] = None + MONGO_DB_MAXIMUM_DOCUMENT_COUNT: ClassVar[Any] = None + MONGO_DB_MAX_SIZE: ClassVar[Any] = None + MONGO_DB_NUM_ORPHAN_DOCS: ClassVar[Any] = None + MONGO_DB_NUM_INDEXES: ClassVar[Any] = None + MONGO_DB_TOTAL_INDEX_SIZE: ClassVar[Any] = None + 
MONGO_DB_AVERAGE_OBJECT_SIZE: ClassVar[Any] = None MONGO_DB_COLLECTION_SCHEMA_DEFINITION: ClassVar[Any] = None NO_SQL_SCHEMA_DEFINITION: ClassVar[Any] = None COLUMN_COUNT: ClassVar[Any] = None @@ -180,13 +180,15 @@ class MongoDBCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDBCollection" + mongo_db_collection_subtype: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="mongoDBCollectionSubtype" ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -195,43 +197,43 @@ class MongoDBCollection(Asset): ) """Name of the field containing the date in each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, 
name="mongoDBMaximumDocumentCount" ) """Maximum number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an object in the collection.""" @@ -536,76 +538,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mongo_db_database is UNSET: - errors.append("mongo_db_database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"MongoDBCollection validation failed: {errors}") - - def minimize(self) -> "MongoDBCollection": - """ - Return a minimal copy of this MongoDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDBCollection instance with only the minimum required fields. 
- """ - self.validate() - return MongoDBCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDBCollection": - """ - Create a :class:`RelatedMongoDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDBCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMongoDBCollection(guid=self.guid) - return RelatedMongoDBCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -668,8 +600,8 @@ class MongoDBCollectionAttributes(AssetAttributes): ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -678,43 +610,43 @@ class MongoDBCollectionAttributes(AssetAttributes): ) """Name of the field containing the date in each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = 
msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaximumDocumentCount" ) """Maximum number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an 
object in the collection.""" @@ -1088,26 +1020,16 @@ def _populate_mongo_db_collection_attrs( """Populate MongoDBCollection-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.mongo_db_collection_subtype = obj.mongo_db_collection_subtype - attrs.mongo_db_collection_is_capped = obj.mongo_db_collection_is_capped + attrs.mongo_db_is_capped = obj.mongo_db_is_capped attrs.mongo_db_collection_time_field = obj.mongo_db_collection_time_field - attrs.mongo_db_collection_time_granularity = ( - obj.mongo_db_collection_time_granularity - ) - attrs.mongo_db_collection_expire_after_seconds = ( - obj.mongo_db_collection_expire_after_seconds - ) - attrs.mongo_db_collection_maximum_document_count = ( - obj.mongo_db_collection_maximum_document_count - ) - attrs.mongo_db_collection_max_size = obj.mongo_db_collection_max_size - attrs.mongo_db_collection_num_orphan_docs = obj.mongo_db_collection_num_orphan_docs - attrs.mongo_db_collection_num_indexes = obj.mongo_db_collection_num_indexes - attrs.mongo_db_collection_total_index_size = ( - obj.mongo_db_collection_total_index_size - ) - attrs.mongo_db_collection_average_object_size = ( - obj.mongo_db_collection_average_object_size - ) + attrs.mongo_db_time_granularity = obj.mongo_db_time_granularity + attrs.mongo_db_expire_after_seconds = obj.mongo_db_expire_after_seconds + attrs.mongo_db_maximum_document_count = obj.mongo_db_maximum_document_count + attrs.mongo_db_max_size = obj.mongo_db_max_size + attrs.mongo_db_num_orphan_docs = obj.mongo_db_num_orphan_docs + attrs.mongo_db_num_indexes = obj.mongo_db_num_indexes + attrs.mongo_db_total_index_size = obj.mongo_db_total_index_size + attrs.mongo_db_average_object_size = obj.mongo_db_average_object_size attrs.mongo_db_collection_schema_definition = ( obj.mongo_db_collection_schema_definition ) @@ -1163,28 +1085,16 @@ def _extract_mongo_db_collection_attrs(attrs: MongoDBCollectionAttributes) -> di """Extract all MongoDBCollection attributes from the attrs 
struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["mongo_db_collection_subtype"] = attrs.mongo_db_collection_subtype - result["mongo_db_collection_is_capped"] = attrs.mongo_db_collection_is_capped + result["mongo_db_is_capped"] = attrs.mongo_db_is_capped result["mongo_db_collection_time_field"] = attrs.mongo_db_collection_time_field - result["mongo_db_collection_time_granularity"] = ( - attrs.mongo_db_collection_time_granularity - ) - result["mongo_db_collection_expire_after_seconds"] = ( - attrs.mongo_db_collection_expire_after_seconds - ) - result["mongo_db_collection_maximum_document_count"] = ( - attrs.mongo_db_collection_maximum_document_count - ) - result["mongo_db_collection_max_size"] = attrs.mongo_db_collection_max_size - result["mongo_db_collection_num_orphan_docs"] = ( - attrs.mongo_db_collection_num_orphan_docs - ) - result["mongo_db_collection_num_indexes"] = attrs.mongo_db_collection_num_indexes - result["mongo_db_collection_total_index_size"] = ( - attrs.mongo_db_collection_total_index_size - ) - result["mongo_db_collection_average_object_size"] = ( - attrs.mongo_db_collection_average_object_size - ) + result["mongo_db_time_granularity"] = attrs.mongo_db_time_granularity + result["mongo_db_expire_after_seconds"] = attrs.mongo_db_expire_after_seconds + result["mongo_db_maximum_document_count"] = attrs.mongo_db_maximum_document_count + result["mongo_db_max_size"] = attrs.mongo_db_max_size + result["mongo_db_num_orphan_docs"] = attrs.mongo_db_num_orphan_docs + result["mongo_db_num_indexes"] = attrs.mongo_db_num_indexes + result["mongo_db_total_index_size"] = attrs.mongo_db_total_index_size + result["mongo_db_average_object_size"] = attrs.mongo_db_average_object_size result["mongo_db_collection_schema_definition"] = ( attrs.mongo_db_collection_schema_definition ) @@ -1276,9 +1186,6 @@ def _mongo_db_collection_to_nested( is_incomplete=mongo_db_collection.is_incomplete, provenance_type=mongo_db_collection.provenance_type, 
home_id=mongo_db_collection.home_id, - depth=mongo_db_collection.depth, - immediate_upstream=mongo_db_collection.immediate_upstream, - immediate_downstream=mongo_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1314,6 +1221,7 @@ def _mongo_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1322,9 +1230,6 @@ def _mongo_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1362,35 +1267,33 @@ def _mongo_db_collection_from_nested_bytes( "mongoDBCollectionSubtype", "mongoDBCollectionSubtype.text", ) -MongoDBCollection.MONGO_DB_COLLECTION_IS_CAPPED = BooleanField( - "mongoDBCollectionIsCapped", "mongoDBCollectionIsCapped" +MongoDBCollection.MONGO_DB_IS_CAPPED = BooleanField( + "mongoDBIsCapped", "mongoDBIsCapped" ) MongoDBCollection.MONGO_DB_COLLECTION_TIME_FIELD = KeywordField( "mongoDBCollectionTimeField", "mongoDBCollectionTimeField" ) -MongoDBCollection.MONGO_DB_COLLECTION_TIME_GRANULARITY = KeywordField( - "mongoDBCollectionTimeGranularity", "mongoDBCollectionTimeGranularity" -) -MongoDBCollection.MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS = NumericField( - "mongoDBCollectionExpireAfterSeconds", "mongoDBCollectionExpireAfterSeconds" +MongoDBCollection.MONGO_DB_TIME_GRANULARITY = KeywordField( + "mongoDBTimeGranularity", "mongoDBTimeGranularity" ) -MongoDBCollection.MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT = NumericField( - 
"mongoDBCollectionMaximumDocumentCount", "mongoDBCollectionMaximumDocumentCount" +MongoDBCollection.MONGO_DB_EXPIRE_AFTER_SECONDS = NumericField( + "mongoDBExpireAfterSeconds", "mongoDBExpireAfterSeconds" ) -MongoDBCollection.MONGO_DB_COLLECTION_MAX_SIZE = NumericField( - "mongoDBCollectionMaxSize", "mongoDBCollectionMaxSize" +MongoDBCollection.MONGO_DB_MAXIMUM_DOCUMENT_COUNT = NumericField( + "mongoDBMaximumDocumentCount", "mongoDBMaximumDocumentCount" ) -MongoDBCollection.MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS = NumericField( - "mongoDBCollectionNumOrphanDocs", "mongoDBCollectionNumOrphanDocs" +MongoDBCollection.MONGO_DB_MAX_SIZE = NumericField("mongoDBMaxSize", "mongoDBMaxSize") +MongoDBCollection.MONGO_DB_NUM_ORPHAN_DOCS = NumericField( + "mongoDBNumOrphanDocs", "mongoDBNumOrphanDocs" ) -MongoDBCollection.MONGO_DB_COLLECTION_NUM_INDEXES = NumericField( - "mongoDBCollectionNumIndexes", "mongoDBCollectionNumIndexes" +MongoDBCollection.MONGO_DB_NUM_INDEXES = NumericField( + "mongoDBNumIndexes", "mongoDBNumIndexes" ) -MongoDBCollection.MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE = NumericField( - "mongoDBCollectionTotalIndexSize", "mongoDBCollectionTotalIndexSize" +MongoDBCollection.MONGO_DB_TOTAL_INDEX_SIZE = NumericField( + "mongoDBTotalIndexSize", "mongoDBTotalIndexSize" ) -MongoDBCollection.MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE = NumericField( - "mongoDBCollectionAverageObjectSize", "mongoDBCollectionAverageObjectSize" +MongoDBCollection.MONGO_DB_AVERAGE_OBJECT_SIZE = NumericField( + "mongoDBAverageObjectSize", "mongoDBAverageObjectSize" ) MongoDBCollection.MONGO_DB_COLLECTION_SCHEMA_DEFINITION = KeywordField( "mongoDBCollectionSchemaDefinition", "mongoDBCollectionSchemaDefinition" diff --git a/pyatlan_v9/model/assets/mongo_db_database.py b/pyatlan_v9/model/assets/mongo_db_database.py index df3a42562..357d93c3f 100644 --- a/pyatlan_v9/model/assets/mongo_db_database.py +++ b/pyatlan_v9/model/assets/mongo_db_database.py @@ -49,7 +49,7 @@ from .fabric_related import 
RelatedFabricWorkspace from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .mongo_db_related import RelatedMongoDBCollection, RelatedMongoDBDatabase +from .mongo_db_related import RelatedMongoDBCollection from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -132,6 +132,8 @@ class MongoDBDatabase(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDBDatabase" + mongo_db_database_collection_count: Union[int, None, UnsetType] = msgspec.field( default=UNSET, name="mongoDBDatabaseCollectionCount" ) @@ -332,66 +334,6 @@ class MongoDBDatabase(Asset): def __post_init__(self) -> None: self.type_name = "MongoDBDatabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MongoDBDatabase validation failed: {errors}") - - def minimize(self) -> "MongoDBDatabase": - """ - Return a minimal copy of this MongoDBDatabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDBDatabase instance with only the minimum required fields. - """ - self.validate() - return MongoDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDBDatabase": - """ - Create a :class:`RelatedMongoDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDBDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMongoDBDatabase(guid=self.guid) - return RelatedMongoDBDatabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -809,9 +751,6 @@ def _mongo_db_database_to_nested( is_incomplete=mongo_db_database.is_incomplete, provenance_type=mongo_db_database.provenance_type, home_id=mongo_db_database.home_id, - depth=mongo_db_database.depth, - immediate_upstream=mongo_db_database.immediate_upstream, - immediate_downstream=mongo_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -845,6 +784,7 @@ def _mongo_db_database_from_nested(nested: MongoDBDatabaseNested) -> MongoDBData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -853,9 +793,6 @@ def _mongo_db_database_from_nested(nested: MongoDBDatabaseNested) -> MongoDBData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/mongo_db_related.py b/pyatlan_v9/model/assets/mongo_db_related.py index 52354d7c1..3dce83c5e 100644 --- a/pyatlan_v9/model/assets/mongo_db_related.py +++ b/pyatlan_v9/model/assets/mongo_db_related.py @@ -76,8 +76,8 @@ class RelatedMongoDBCollection(RelatedMongoDB): ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - 
mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -86,43 +86,43 @@ class RelatedMongoDBCollection(RelatedMongoDB): ) """Name of the field containing the date in each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaximumDocumentCount" ) """Maximum number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] 
= msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an object in the collection.""" diff --git a/pyatlan_v9/model/assets/monte_carlo.py b/pyatlan_v9/model/assets/monte_carlo.py index 1bd4b9213..996424227 100644 --- a/pyatlan_v9/model/assets/monte_carlo.py +++ b/pyatlan_v9/model/assets/monte_carlo.py @@ -41,7 +41,7 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor, RelatedMonteCarlo +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -93,6 +93,8 @@ class MonteCarlo(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MonteCarlo" + mc_labels: Union[List[str], 
None, UnsetType] = UNSET """List of labels for this Monte Carlo asset.""" @@ -197,66 +199,6 @@ class MonteCarlo(Asset): def __post_init__(self) -> None: self.type_name = "MonteCarlo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MonteCarlo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MonteCarlo validation failed: {errors}") - - def minimize(self) -> "MonteCarlo": - """ - Return a minimal copy of this MonteCarlo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MonteCarlo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MonteCarlo instance with only the minimum required fields. - """ - self.validate() - return MonteCarlo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMonteCarlo": - """ - Create a :class:`RelatedMonteCarlo` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMonteCarlo reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMonteCarlo(guid=self.guid) - return RelatedMonteCarlo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -518,9 +460,6 @@ def _monte_carlo_to_nested(monte_carlo: MonteCarlo) -> MonteCarloNested: is_incomplete=monte_carlo.is_incomplete, provenance_type=monte_carlo.provenance_type, home_id=monte_carlo.home_id, - depth=monte_carlo.depth, - immediate_upstream=monte_carlo.immediate_upstream, - immediate_downstream=monte_carlo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -552,6 +491,7 @@ def _monte_carlo_from_nested(nested: MonteCarloNested) -> MonteCarlo: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -560,9 +500,6 @@ def _monte_carlo_from_nested(nested: MonteCarloNested) -> MonteCarlo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_monte_carlo_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/multi_dimensional_dataset.py b/pyatlan_v9/model/assets/multi_dimensional_dataset.py index 382308ff6..6acbf719a 100644 --- a/pyatlan_v9/model/assets/multi_dimensional_dataset.py +++ 
b/pyatlan_v9/model/assets/multi_dimensional_dataset.py @@ -37,7 +37,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCubeDimension, RelatedMultiDimensionalDataset +from .cube_related import RelatedCubeDimension from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -98,6 +98,8 @@ class MultiDimensionalDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MultiDimensionalDataset" + cube_name: Union[str, None, UnsetType] = UNSET """Simple name of the cube in which this asset exists, or empty if it is itself a cube.""" @@ -214,68 +216,6 @@ class MultiDimensionalDataset(Asset): def __post_init__(self) -> None: self.type_name = "MultiDimensionalDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MultiDimensionalDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MultiDimensionalDataset validation failed: {errors}") - - def minimize(self) -> "MultiDimensionalDataset": - """ - Return a minimal copy of this MultiDimensionalDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MultiDimensionalDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MultiDimensionalDataset instance with only the minimum required fields. - """ - self.validate() - return MultiDimensionalDataset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMultiDimensionalDataset": - """ - Create a :class:`RelatedMultiDimensionalDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMultiDimensionalDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMultiDimensionalDataset(guid=self.guid) - return RelatedMultiDimensionalDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -568,9 +508,6 @@ def _multi_dimensional_dataset_to_nested( is_incomplete=multi_dimensional_dataset.is_incomplete, provenance_type=multi_dimensional_dataset.provenance_type, home_id=multi_dimensional_dataset.home_id, - depth=multi_dimensional_dataset.depth, - immediate_upstream=multi_dimensional_dataset.immediate_upstream, - immediate_downstream=multi_dimensional_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -606,6 +543,7 @@ def _multi_dimensional_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -614,9 +552,6 @@ def _multi_dimensional_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_multi_dimensional_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/namespace.py b/pyatlan_v9/model/assets/namespace.py index ea336895e..93b0bfa6b 100644 --- a/pyatlan_v9/model/assets/namespace.py +++ b/pyatlan_v9/model/assets/namespace.py @@ -40,7 +40,7 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor -from .namespace_related import RelatedFolder, RelatedNamespace +from .namespace_related import RelatedFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -79,6 +79,8 @@ class Namespace(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Namespace" + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -148,66 +150,6 @@ class Namespace(Asset): def __post_init__(self) -> None: self.type_name = "Namespace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Namespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Namespace validation failed: {errors}") - - def minimize(self) -> "Namespace": - """ - Return a minimal copy of this Namespace with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Namespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Namespace instance with only the minimum required fields. - """ - self.validate() - return Namespace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNamespace": - """ - Create a :class:`RelatedNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNamespace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedNamespace(guid=self.guid) - return RelatedNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -421,9 +363,6 @@ def _namespace_to_nested(namespace: Namespace) -> NamespaceNested: is_incomplete=namespace.is_incomplete, provenance_type=namespace.provenance_type, home_id=namespace.home_id, - depth=namespace.depth, - immediate_upstream=namespace.immediate_upstream, - immediate_downstream=namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -455,6 +394,7 @@ def _namespace_from_nested(nested: NamespaceNested) -> Namespace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -463,9 +403,6 @@ def _namespace_from_nested(nested: NamespaceNested) -> Namespace: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/no_sql.py b/pyatlan_v9/model/assets/no_sql.py index fc9d9ea5a..552a12369 100644 --- a/pyatlan_v9/model/assets/no_sql.py +++ b/pyatlan_v9/model/assets/no_sql.py @@ -38,7 +38,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedNoSQL from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -93,6 +92,8 @@ class NoSQL(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "NoSQL" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -193,66 +194,6 @@ class NoSQL(Asset): def __post_init__(self) -> None: self.type_name = "NoSQL" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this NoSQL instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"NoSQL validation failed: {errors}") - - def minimize(self) -> "NoSQL": - """ - Return a minimal copy of this NoSQL with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new NoSQL with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new NoSQL instance with only the minimum required fields. - """ - self.validate() - return NoSQL(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNoSQL": - """ - Create a :class:`RelatedNoSQL` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNoSQL reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedNoSQL(guid=self.guid) - return RelatedNoSQL(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,9 +447,6 @@ def _no_sql_to_nested(no_sql: NoSQL) -> NoSQLNested: is_incomplete=no_sql.is_incomplete, provenance_type=no_sql.provenance_type, home_id=no_sql.home_id, - depth=no_sql.depth, - immediate_upstream=no_sql.immediate_upstream, - immediate_downstream=no_sql.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -538,6 +476,7 @@ def _no_sql_from_nested(nested: NoSQLNested) -> NoSQL: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -546,9 +485,6 @@ def _no_sql_from_nested(nested: NoSQLNested) -> NoSQL: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_no_sql_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/notebook.py b/pyatlan_v9/model/assets/notebook.py index cc1eaf407..a47d9c884 100644 --- a/pyatlan_v9/model/assets/notebook.py +++ b/pyatlan_v9/model/assets/notebook.py @@ -42,7 +42,6 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from .notebook_related import RelatedNotebook from .partial_related import RelatedPartialField, RelatedPartialObject from 
.process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -91,6 +90,8 @@ class Notebook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Notebook" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Notebook(Asset): def __post_init__(self) -> None: self.type_name = "Notebook" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Notebook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Notebook validation failed: {errors}") - - def minimize(self) -> "Notebook": - """ - Return a minimal copy of this Notebook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Notebook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Notebook instance with only the minimum required fields. - """ - self.validate() - return Notebook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNotebook": - """ - Create a :class:`RelatedNotebook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNotebook reference to this asset. - """ - if self.guid is not UNSET: - return RelatedNotebook(guid=self.guid) - return RelatedNotebook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _notebook_to_nested(notebook: Notebook) -> NotebookNested: is_incomplete=notebook.is_incomplete, provenance_type=notebook.provenance_type, home_id=notebook.home_id, - depth=notebook.depth, - immediate_upstream=notebook.immediate_upstream, - immediate_downstream=notebook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -527,6 +465,7 @@ def _notebook_from_nested(nested: NotebookNested) -> Notebook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -535,9 +474,6 @@ def _notebook_from_nested(nested: NotebookNested) -> Notebook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_notebook_attrs(attrs), # Merged relationship attributes 
**merged_rels, diff --git a/pyatlan_v9/model/assets/object_store.py b/pyatlan_v9/model/assets/object_store.py index 0eb20b19c..4da05f90a 100644 --- a/pyatlan_v9/model/assets/object_store.py +++ b/pyatlan_v9/model/assets/object_store.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedObjectStore from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class ObjectStore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ObjectStore" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class ObjectStore(Asset): def __post_init__(self) -> None: self.type_name = "ObjectStore" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ObjectStore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ObjectStore validation failed: {errors}") - - def minimize(self) -> "ObjectStore": - """ - Return a minimal copy of this ObjectStore with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ObjectStore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ObjectStore instance with only the minimum required fields. - """ - self.validate() - return ObjectStore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedObjectStore": - """ - Create a :class:`RelatedObjectStore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedObjectStore reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedObjectStore(guid=self.guid) - return RelatedObjectStore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -495,9 +436,6 @@ def _object_store_to_nested(object_store: ObjectStore) -> ObjectStoreNested: is_incomplete=object_store.is_incomplete, provenance_type=object_store.provenance_type, home_id=object_store.home_id, - depth=object_store.depth, - immediate_upstream=object_store.immediate_upstream, - immediate_downstream=object_store.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -529,6 +467,7 @@ def _object_store_from_nested(nested: ObjectStoreNested) -> ObjectStore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -537,9 +476,6 @@ def _object_store_from_nested(nested: ObjectStoreNested) -> ObjectStore: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_object_store_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/partial.py b/pyatlan_v9/model/assets/partial.py index c28778e35..82701b0b7 100644 --- a/pyatlan_v9/model/assets/partial.py +++ b/pyatlan_v9/model/assets/partial.py @@ -43,7 +43,7 @@ from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from .partial_related 
import RelatedPartial, RelatedPartialField, RelatedPartialObject +from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -96,6 +96,8 @@ class Partial(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Partial" + partial_structure_json: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="partialStructureJSON" ) @@ -208,66 +210,6 @@ class Partial(Asset): def __post_init__(self) -> None: self.type_name = "Partial" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Partial instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Partial validation failed: {errors}") - - def minimize(self) -> "Partial": - """ - Return a minimal copy of this Partial with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Partial with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Partial instance with only the minimum required fields. - """ - self.validate() - return Partial(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartial": - """ - Create a :class:`RelatedPartial` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartial reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPartial(guid=self.guid) - return RelatedPartial(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -543,9 +485,6 @@ def _partial_to_nested(partial: Partial) -> PartialNested: is_incomplete=partial.is_incomplete, provenance_type=partial.provenance_type, home_id=partial.home_id, - depth=partial.depth, - immediate_upstream=partial.immediate_upstream, - immediate_downstream=partial.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -575,6 +514,7 @@ def _partial_from_nested(nested: PartialNested) -> Partial: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -583,9 +523,6 @@ def _partial_from_nested(nested: PartialNested) -> Partial: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, 
- immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_partial_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/partial_field.py b/pyatlan_v9/model/assets/partial_field.py index 48c5c8094..d25c00839 100644 --- a/pyatlan_v9/model/assets/partial_field.py +++ b/pyatlan_v9/model/assets/partial_field.py @@ -100,6 +100,8 @@ class PartialField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PartialField" + partial_data_type: Union[str, None, UnsetType] = UNSET """Type of data captured as values in the field.""" @@ -224,70 +226,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PartialField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"PartialField validation failed: {errors}") - - def minimize(self) -> "PartialField": - """ - Return a minimal copy of this PartialField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PartialField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PartialField instance with only the minimum required fields. - """ - self.validate() - return PartialField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartialField": - """ - Create a :class:`RelatedPartialField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartialField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPartialField(guid=self.guid) - return RelatedPartialField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -576,9 +514,6 @@ def _partial_field_to_nested(partial_field: PartialField) -> PartialFieldNested: is_incomplete=partial_field.is_incomplete, provenance_type=partial_field.provenance_type, home_id=partial_field.home_id, - depth=partial_field.depth, - immediate_upstream=partial_field.immediate_upstream, - immediate_downstream=partial_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -612,6 +547,7 @@ def _partial_field_from_nested(nested: PartialFieldNested) -> PartialField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -620,9 +556,6 @@ def _partial_field_from_nested(nested: PartialFieldNested) -> PartialField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_partial_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/partial_object.py b/pyatlan_v9/model/assets/partial_object.py index 8e2c91b2c..2a1e536ec 100644 --- a/pyatlan_v9/model/assets/partial_object.py +++ b/pyatlan_v9/model/assets/partial_object.py @@ -99,6 +99,8 @@ class PartialObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = 
"PartialObject" + partial_structure_json: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="partialStructureJSON" ) @@ -220,70 +222,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PartialObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"PartialObject validation failed: {errors}") - - def minimize(self) -> "PartialObject": - """ - Return a minimal copy of this PartialObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PartialObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new PartialObject instance with only the minimum required fields. - """ - self.validate() - return PartialObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartialObject": - """ - Create a :class:`RelatedPartialObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartialObject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPartialObject(guid=self.guid) - return RelatedPartialObject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -567,9 +505,6 @@ def _partial_object_to_nested(partial_object: PartialObject) -> PartialObjectNes is_incomplete=partial_object.is_incomplete, provenance_type=partial_object.provenance_type, home_id=partial_object.home_id, - depth=partial_object.depth, - immediate_upstream=partial_object.immediate_upstream, - immediate_downstream=partial_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -603,6 +538,7 @@ def _partial_object_from_nested(nested: PartialObjectNested) -> PartialObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -611,9 +547,6 @@ def _partial_object_from_nested(nested: PartialObjectNested) -> PartialObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_partial_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi.py b/pyatlan_v9/model/assets/power_bi.py index 190de3494..39e429ec4 100644 --- a/pyatlan_v9/model/assets/power_bi.py +++ b/pyatlan_v9/model/assets/power_bi.py @@ -44,7 +44,6 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBI from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -98,6 +97,8 @@ class PowerBI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBI" + power_bi_is_hidden: Union[bool, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIIsHidden" ) @@ -223,66 +224,6 @@ class PowerBI(Asset): def __post_init__(self) -> None: self.type_name = "PowerBI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBI validation failed: {errors}") - - def minimize(self) -> "PowerBI": - """ - Return a minimal copy of this PowerBI with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBI instance with only the minimum required fields. - """ - self.validate() - return PowerBI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBI": - """ - Create a :class:`RelatedPowerBI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBI reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBI(guid=self.guid) - return RelatedPowerBI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -571,9 +512,6 @@ def _power_bi_to_nested(power_bi: PowerBI) -> PowerBINested: is_incomplete=power_bi.is_incomplete, provenance_type=power_bi.provenance_type, home_id=power_bi.home_id, - depth=power_bi.depth, - immediate_upstream=power_bi.immediate_upstream, - immediate_downstream=power_bi.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -603,6 +541,7 @@ def _power_bi_from_nested(nested: PowerBINested) -> PowerBI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -611,9 +550,6 @@ def _power_bi_from_nested(nested: PowerBINested) -> PowerBI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_app.py b/pyatlan_v9/model/assets/power_bi_app.py index aa67a7b71..94ba6a6e9 100644 --- a/pyatlan_v9/model/assets/power_bi_app.py +++ b/pyatlan_v9/model/assets/power_bi_app.py @@ -44,11 +44,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIApp, - 
RelatedPowerBIDashboard, - RelatedPowerBIReport, -) +from .power_bi_related import RelatedPowerBIDashboard, RelatedPowerBIReport from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -107,6 +103,8 @@ class PowerBIApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIApp" + power_bi_app_id: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIAppId" ) @@ -257,66 +255,6 @@ class PowerBIApp(Asset): def __post_init__(self) -> None: self.type_name = "PowerBIApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBIApp validation failed: {errors}") - - def minimize(self) -> "PowerBIApp": - """ - Return a minimal copy of this PowerBIApp with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIApp instance with only the minimum required fields. - """ - self.validate() - return PowerBIApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIApp": - """ - Create a :class:`RelatedPowerBIApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIApp reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIApp(guid=self.guid) - return RelatedPowerBIApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -638,9 +576,6 @@ def _power_bi_app_to_nested(power_bi_app: PowerBIApp) -> PowerBIAppNested: is_incomplete=power_bi_app.is_incomplete, provenance_type=power_bi_app.provenance_type, home_id=power_bi_app.home_id, - depth=power_bi_app.depth, - immediate_upstream=power_bi_app.immediate_upstream, - immediate_downstream=power_bi_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -672,6 +607,7 @@ def _power_bi_app_from_nested(nested: PowerBIAppNested) -> PowerBIApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -680,9 +616,6 @@ def _power_bi_app_from_nested(nested: PowerBIAppNested) -> PowerBIApp: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_app_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_column.py b/pyatlan_v9/model/assets/power_bi_column.py index ce0a7b3cb..af243b2d5 100644 --- a/pyatlan_v9/model/assets/power_bi_column.py +++ b/pyatlan_v9/model/assets/power_bi_column.py @@ -45,11 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIColumn, - RelatedPowerBIMeasure, - RelatedPowerBITable, -) +from .power_bi_related import RelatedPowerBIMeasure, RelatedPowerBITable from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -111,6 +107,8 @@ class PowerBIColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIColumn" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this column exists.""" @@ -278,78 +276,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.power_bi_table_qualified_name is UNSET: - errors.append("power_bi_table_qualified_name is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIColumn validation failed: {errors}") - - def minimize(self) -> "PowerBIColumn": - """ - Return a minimal copy of this PowerBIColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIColumn instance with only the minimum required fields. 
- """ - self.validate() - return PowerBIColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIColumn": - """ - Create a :class:`RelatedPowerBIColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIColumn(guid=self.guid) - return RelatedPowerBIColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -692,9 +618,6 @@ def _power_bi_column_to_nested(power_bi_column: PowerBIColumn) -> PowerBIColumnN is_incomplete=power_bi_column.is_incomplete, provenance_type=power_bi_column.provenance_type, home_id=power_bi_column.home_id, - depth=power_bi_column.depth, - immediate_upstream=power_bi_column.immediate_upstream, - immediate_downstream=power_bi_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -728,6 +651,7 @@ def _power_bi_column_from_nested(nested: PowerBIColumnNested) -> PowerBIColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -736,9 +660,6 @@ def _power_bi_column_from_nested(nested: PowerBIColumnNested) -> PowerBIColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_power_bi_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_dashboard.py b/pyatlan_v9/model/assets/power_bi_dashboard.py index 8eee1f2be..819b38681 100644 --- a/pyatlan_v9/model/assets/power_bi_dashboard.py +++ b/pyatlan_v9/model/assets/power_bi_dashboard.py @@ -47,7 +47,6 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .power_bi_related import ( RelatedPowerBIApp, - RelatedPowerBIDashboard, RelatedPowerBITile, RelatedPowerBIWorkspace, ) @@ -110,6 +109,8 @@ class PowerBIDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDashboard" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dashboard exists.""" @@ -261,74 +262,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDashboard validation failed: {errors}") - - def minimize(self) -> "PowerBIDashboard": - """ - Return a minimal copy of this PowerBIDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDashboard instance with only the minimum required fields. - """ - self.validate() - return PowerBIDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDashboard": - """ - Create a :class:`RelatedPowerBIDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDashboard(guid=self.guid) - return RelatedPowerBIDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -656,9 +589,6 @@ def _power_bi_dashboard_to_nested( is_incomplete=power_bi_dashboard.is_incomplete, provenance_type=power_bi_dashboard.provenance_type, home_id=power_bi_dashboard.home_id, - depth=power_bi_dashboard.depth, - immediate_upstream=power_bi_dashboard.immediate_upstream, - immediate_downstream=power_bi_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +622,7 @@ def _power_bi_dashboard_from_nested(nested: PowerBIDashboardNested) -> PowerBIDa updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -700,9 +631,6 @@ def _power_bi_dashboard_from_nested(nested: PowerBIDashboardNested) -> PowerBIDa is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_dataflow.py b/pyatlan_v9/model/assets/power_bi_dataflow.py index 1c61f985d..6075fa73a 100644 --- a/pyatlan_v9/model/assets/power_bi_dataflow.py +++ b/pyatlan_v9/model/assets/power_bi_dataflow.py @@ -119,6 +119,8 @@ class PowerBIDataflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, 
UnsetType] = "PowerBIDataflow" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dataflow exists.""" @@ -305,74 +307,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDataflow validation failed: {errors}") - - def minimize(self) -> "PowerBIDataflow": - """ - Return a minimal copy of this PowerBIDataflow with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataflow instance with only the minimum required fields. - """ - self.validate() - return PowerBIDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDataflow": - """ - Create a :class:`RelatedPowerBIDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDataflow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIDataflow(guid=self.guid) - return RelatedPowerBIDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -756,9 +690,6 @@ def _power_bi_dataflow_to_nested( is_incomplete=power_bi_dataflow.is_incomplete, provenance_type=power_bi_dataflow.provenance_type, home_id=power_bi_dataflow.home_id, - depth=power_bi_dataflow.depth, - immediate_upstream=power_bi_dataflow.immediate_upstream, - immediate_downstream=power_bi_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -792,6 +723,7 @@ def _power_bi_dataflow_from_nested(nested: PowerBIDataflowNested) -> PowerBIData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -800,9 +732,6 @@ def _power_bi_dataflow_from_nested(nested: 
PowerBIDataflowNested) -> PowerBIData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py b/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py index b3bef4f42..179d3510e 100644 --- a/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py +++ b/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py @@ -45,7 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBIDataflow, RelatedPowerBIDataflowEntityColumn +from .power_bi_related import RelatedPowerBIDataflow from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -104,6 +104,8 @@ class PowerBIDataflowEntityColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDataflowEntityColumn" + power_bi_dataflow_entity_name: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIDataflowEntityName" ) @@ -262,82 +264,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataflowEntityColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.power_bi_dataflow is UNSET: - errors.append("power_bi_dataflow is required for creation") - if self.power_bi_dataflow_qualified_name is UNSET: - errors.append( - "power_bi_dataflow_qualified_name is required for creation" - ) - if self.power_bi_workspace_qualified_name is UNSET: - errors.append( - "power_bi_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PowerBIDataflowEntityColumn validation failed: {errors}") - - def minimize(self) -> "PowerBIDataflowEntityColumn": - """ - Return a minimal copy of this PowerBIDataflowEntityColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataflowEntityColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataflowEntityColumn instance with only the minimum required fields. 
- """ - self.validate() - return PowerBIDataflowEntityColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedPowerBIDataflowEntityColumn": - """ - Create a :class:`RelatedPowerBIDataflowEntityColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDataflowEntityColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIDataflowEntityColumn(guid=self.guid) - return RelatedPowerBIDataflowEntityColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -680,9 +606,6 @@ def _power_bi_dataflow_entity_column_to_nested( is_incomplete=power_bi_dataflow_entity_column.is_incomplete, provenance_type=power_bi_dataflow_entity_column.provenance_type, home_id=power_bi_dataflow_entity_column.home_id, - depth=power_bi_dataflow_entity_column.depth, - immediate_upstream=power_bi_dataflow_entity_column.immediate_upstream, - immediate_downstream=power_bi_dataflow_entity_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -718,6 +641,7 @@ def _power_bi_dataflow_entity_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -726,9 +650,6 @@ def _power_bi_dataflow_entity_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataflow_entity_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_dataset.py b/pyatlan_v9/model/assets/power_bi_dataset.py index 87b3c50ea..661d0b557 100644 --- a/pyatlan_v9/model/assets/power_bi_dataset.py +++ b/pyatlan_v9/model/assets/power_bi_dataset.py @@ -47,7 +47,6 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .power_bi_related import ( RelatedPowerBIDataflow, - RelatedPowerBIDataset, RelatedPowerBIDatasource, RelatedPowerBIReport, RelatedPowerBITable, @@ -115,6 +114,8 @@ class PowerBIDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDataset" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dataset exists.""" @@ -270,74 +271,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDataset validation failed: {errors}") - - def minimize(self) -> "PowerBIDataset": - """ - Return a minimal copy of this PowerBIDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataset instance with only the minimum required fields. - """ - self.validate() - return PowerBIDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDataset": - """ - Create a :class:`RelatedPowerBIDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDataset(guid=self.guid) - return RelatedPowerBIDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -668,9 +601,6 @@ def _power_bi_dataset_to_nested( is_incomplete=power_bi_dataset.is_incomplete, provenance_type=power_bi_dataset.provenance_type, home_id=power_bi_dataset.home_id, - depth=power_bi_dataset.depth, - immediate_upstream=power_bi_dataset.immediate_upstream, - immediate_downstream=power_bi_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -704,6 +634,7 @@ def _power_bi_dataset_from_nested(nested: PowerBIDatasetNested) -> PowerBIDatase updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -712,9 +643,6 @@ def _power_bi_dataset_from_nested(nested: PowerBIDatasetNested) -> PowerBIDatase is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_datasource.py b/pyatlan_v9/model/assets/power_bi_datasource.py index dd42627bc..c0477e754 100644 --- a/pyatlan_v9/model/assets/power_bi_datasource.py +++ b/pyatlan_v9/model/assets/power_bi_datasource.py @@ -45,11 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from 
.partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIDataflow, - RelatedPowerBIDataset, - RelatedPowerBIDatasource, -) +from .power_bi_related import RelatedPowerBIDataflow, RelatedPowerBIDataset from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -106,6 +102,8 @@ class PowerBIDatasource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDatasource" + connection_details: Union[Dict[str, str], None, UnsetType] = UNSET """Connection details of the datasource.""" @@ -250,72 +248,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasets is UNSET: - errors.append("datasets is required for creation") - if errors: - raise ValueError(f"PowerBIDatasource validation failed: {errors}") - - def minimize(self) -> "PowerBIDatasource": - """ - Return a minimal copy of this PowerBIDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDatasource instance with only the minimum required fields. - """ - self.validate() - return PowerBIDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDatasource": - """ - Create a :class:`RelatedPowerBIDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDatasource(guid=self.guid) - return RelatedPowerBIDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -629,9 +561,6 @@ def _power_bi_datasource_to_nested( is_incomplete=power_bi_datasource.is_incomplete, provenance_type=power_bi_datasource.provenance_type, home_id=power_bi_datasource.home_id, - depth=power_bi_datasource.depth, - immediate_upstream=power_bi_datasource.immediate_upstream, - immediate_downstream=power_bi_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -667,6 +596,7 @@ def _power_bi_datasource_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -675,9 +605,6 @@ def _power_bi_datasource_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_measure.py b/pyatlan_v9/model/assets/power_bi_measure.py index 9ee51d687..ee22d829f 100644 --- a/pyatlan_v9/model/assets/power_bi_measure.py +++ b/pyatlan_v9/model/assets/power_bi_measure.py @@ -45,11 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from 
.power_bi_related import ( - RelatedPowerBIColumn, - RelatedPowerBIMeasure, - RelatedPowerBITable, -) +from .power_bi_related import RelatedPowerBIColumn, RelatedPowerBITable from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -109,6 +105,8 @@ class PowerBIMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIMeasure" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this measure exists.""" @@ -266,78 +264,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.power_bi_table_qualified_name is UNSET: - errors.append("power_bi_table_qualified_name is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIMeasure validation failed: {errors}") - - def minimize(self) -> "PowerBIMeasure": - """ - Return a minimal copy of this PowerBIMeasure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIMeasure instance with only the minimum required fields. - """ - self.validate() - return PowerBIMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIMeasure": - """ - Create a :class:`RelatedPowerBIMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIMeasure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIMeasure(guid=self.guid) - return RelatedPowerBIMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -668,9 +594,6 @@ def _power_bi_measure_to_nested( is_incomplete=power_bi_measure.is_incomplete, provenance_type=power_bi_measure.provenance_type, home_id=power_bi_measure.home_id, - depth=power_bi_measure.depth, - immediate_upstream=power_bi_measure.immediate_upstream, - immediate_downstream=power_bi_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -704,6 +627,7 @@ def _power_bi_measure_from_nested(nested: PowerBIMeasureNested) -> PowerBIMeasur updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -712,9 +636,6 @@ def _power_bi_measure_from_nested(nested: PowerBIMeasureNested) -> PowerBIMeasur is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_measure_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_page.py b/pyatlan_v9/model/assets/power_bi_page.py index ce78c5eed..4aeb024ef 100644 --- a/pyatlan_v9/model/assets/power_bi_page.py +++ b/pyatlan_v9/model/assets/power_bi_page.py @@ -45,7 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import 
RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBIPage, RelatedPowerBIReport +from .power_bi_related import RelatedPowerBIReport from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -102,6 +102,8 @@ class PowerBIPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIPage" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this page exists.""" @@ -244,76 +246,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.report is UNSET: - errors.append("report is required for creation") - if self.report_qualified_name is UNSET: - errors.append("report_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIPage validation failed: {errors}") - - def minimize(self) -> "PowerBIPage": - """ - Return a minimal copy of this PowerBIPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIPage instance with only the minimum required fields. - """ - self.validate() - return PowerBIPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIPage": - """ - Create a :class:`RelatedPowerBIPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIPage(guid=self.guid) - return RelatedPowerBIPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -618,9 +550,6 @@ def _power_bi_page_to_nested(power_bi_page: PowerBIPage) -> PowerBIPageNested: is_incomplete=power_bi_page.is_incomplete, provenance_type=power_bi_page.provenance_type, home_id=power_bi_page.home_id, - depth=power_bi_page.depth, - immediate_upstream=power_bi_page.immediate_upstream, - immediate_downstream=power_bi_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -652,6 +581,7 @@ def _power_bi_page_from_nested(nested: PowerBIPageNested) -> PowerBIPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -660,9 +590,6 @@ def _power_bi_page_from_nested(nested: PowerBIPageNested) -> PowerBIPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_page_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_report.py b/pyatlan_v9/model/assets/power_bi_report.py index 08c86a59f..c2ee34475 100644 --- a/pyatlan_v9/model/assets/power_bi_report.py +++ b/pyatlan_v9/model/assets/power_bi_report.py @@ -49,7 +49,6 @@ RelatedPowerBIApp, RelatedPowerBIDataset, RelatedPowerBIPage, - RelatedPowerBIReport, RelatedPowerBITile, RelatedPowerBIWorkspace, ) @@ -115,6 +114,8 @@ class 
PowerBIReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIReport" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this report exists.""" @@ -275,74 +276,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIReport validation failed: {errors}") - - def minimize(self) -> "PowerBIReport": - """ - Return a minimal copy of this PowerBIReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIReport instance with only the minimum required fields. - """ - self.validate() - return PowerBIReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIReport": - """ - Create a :class:`RelatedPowerBIReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIReport(guid=self.guid) - return RelatedPowerBIReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -679,9 +612,6 @@ def _power_bi_report_to_nested(power_bi_report: PowerBIReport) -> PowerBIReportN is_incomplete=power_bi_report.is_incomplete, provenance_type=power_bi_report.provenance_type, home_id=power_bi_report.home_id, - depth=power_bi_report.depth, - immediate_upstream=power_bi_report.immediate_upstream, - immediate_downstream=power_bi_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -715,6 +645,7 @@ def _power_bi_report_from_nested(nested: PowerBIReportNested) -> PowerBIReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -723,9 +654,6 @@ def _power_bi_report_from_nested(nested: PowerBIReportNested) -> PowerBIReport: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_table.py b/pyatlan_v9/model/assets/power_bi_table.py index c5ead96dc..b660ef8e5 100644 --- a/pyatlan_v9/model/assets/power_bi_table.py +++ b/pyatlan_v9/model/assets/power_bi_table.py @@ -50,7 +50,6 @@ RelatedPowerBIDataflow, RelatedPowerBIDataset, RelatedPowerBIMeasure, - RelatedPowerBITable, ) from .process_related import RelatedProcess 
from .referenceable_related import RelatedReferenceable @@ -115,6 +114,8 @@ class PowerBITable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBITable" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this table exists.""" @@ -284,76 +285,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBITable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dataset is UNSET: - errors.append("dataset is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBITable validation failed: {errors}") - - def minimize(self) -> "PowerBITable": - """ - Return a minimal copy of this PowerBITable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBITable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBITable instance with only the minimum required fields. - """ - self.validate() - return PowerBITable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBITable": - """ - Create a :class:`RelatedPowerBITable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBITable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBITable(guid=self.guid) - return RelatedPowerBITable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -700,9 +631,6 @@ def _power_bi_table_to_nested(power_bi_table: PowerBITable) -> PowerBITableNeste is_incomplete=power_bi_table.is_incomplete, provenance_type=power_bi_table.provenance_type, home_id=power_bi_table.home_id, - depth=power_bi_table.depth, - immediate_upstream=power_bi_table.immediate_upstream, - immediate_downstream=power_bi_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -736,6 +664,7 @@ def _power_bi_table_from_nested(nested: PowerBITableNested) -> PowerBITable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -744,9 +673,6 @@ def _power_bi_table_from_nested(nested: PowerBITableNested) -> PowerBITable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_tile.py b/pyatlan_v9/model/assets/power_bi_tile.py index c72147c51..8870fd104 100644 --- a/pyatlan_v9/model/assets/power_bi_tile.py +++ b/pyatlan_v9/model/assets/power_bi_tile.py @@ -49,7 +49,6 @@ RelatedPowerBIDashboard, RelatedPowerBIDataset, RelatedPowerBIReport, - RelatedPowerBITile, ) from .process_related import RelatedProcess from 
.referenceable_related import RelatedReferenceable @@ -109,6 +108,8 @@ class PowerBITile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBITile" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this tile exists.""" @@ -257,76 +258,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBITile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dashboard is UNSET: - errors.append("dashboard is required for creation") - if self.dashboard_qualified_name is UNSET: - errors.append("dashboard_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBITile validation failed: {errors}") - - def minimize(self) -> "PowerBITile": - """ - Return a minimal copy of this PowerBITile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBITile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBITile instance with only the minimum required fields. - """ - self.validate() - return PowerBITile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBITile": - """ - Create a :class:`RelatedPowerBITile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBITile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBITile(guid=self.guid) - return RelatedPowerBITile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -639,9 +570,6 @@ def _power_bi_tile_to_nested(power_bi_tile: PowerBITile) -> PowerBITileNested: is_incomplete=power_bi_tile.is_incomplete, provenance_type=power_bi_tile.provenance_type, home_id=power_bi_tile.home_id, - depth=power_bi_tile.depth, - immediate_upstream=power_bi_tile.immediate_upstream, - immediate_downstream=power_bi_tile.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -673,6 +601,7 @@ def _power_bi_tile_from_nested(nested: PowerBITileNested) -> PowerBITile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -681,9 +610,6 @@ def _power_bi_tile_from_nested(nested: PowerBITileNested) -> PowerBITile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_tile_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/power_bi_workspace.py b/pyatlan_v9/model/assets/power_bi_workspace.py index 5df214e8b..dbc4d7e7c 100644 --- a/pyatlan_v9/model/assets/power_bi_workspace.py +++ b/pyatlan_v9/model/assets/power_bi_workspace.py @@ -49,7 +49,6 @@ RelatedPowerBIDataflow, RelatedPowerBIDataset, RelatedPowerBIReport, - RelatedPowerBIWorkspace, ) from .process_related import RelatedProcess from 
.referenceable_related import RelatedReferenceable @@ -113,6 +112,8 @@ class PowerBIWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIWorkspace" + web_url: Union[str, None, UnsetType] = UNSET """Deprecated.""" @@ -265,66 +266,6 @@ class PowerBIWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "PowerBIWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBIWorkspace validation failed: {errors}") - - def minimize(self) -> "PowerBIWorkspace": - """ - Return a minimal copy of this PowerBIWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new PowerBIWorkspace instance with only the minimum required fields. - """ - self.validate() - return PowerBIWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIWorkspace": - """ - Create a :class:`RelatedPowerBIWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIWorkspace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIWorkspace(guid=self.guid) - return RelatedPowerBIWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -664,9 +605,6 @@ def _power_bi_workspace_to_nested( is_incomplete=power_bi_workspace.is_incomplete, provenance_type=power_bi_workspace.provenance_type, home_id=power_bi_workspace.home_id, - depth=power_bi_workspace.depth, - immediate_upstream=power_bi_workspace.immediate_upstream, - immediate_downstream=power_bi_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -700,6 +638,7 @@ def _power_bi_workspace_from_nested(nested: PowerBIWorkspaceNested) -> PowerBIWo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -708,9 +647,6 @@ def _power_bi_workspace_from_nested(nested: PowerBIWorkspaceNested) -> PowerBIWo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_power_bi_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/preset.py b/pyatlan_v9/model/assets/preset.py index dbbdbc6ea..1098c1ab4 100644 --- a/pyatlan_v9/model/assets/preset.py +++ b/pyatlan_v9/model/assets/preset.py @@ -43,7 +43,6 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPreset from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -95,6 +94,8 @@ class Preset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Preset" + preset_workspace_id: Union[int, None, UnsetType] = UNSET """Identifier of the workspace in which this asset exists, in Preset.""" @@ -202,66 +203,6 @@ class Preset(Asset): def __post_init__(self) -> None: self.type_name = "Preset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Preset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Preset validation failed: {errors}") - - def minimize(self) -> "Preset": - """ - Return a minimal copy of this Preset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Preset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Preset instance with only the minimum required fields. - """ - self.validate() - return Preset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPreset": - """ - Create a :class:`RelatedPreset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPreset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPreset(guid=self.guid) - return RelatedPreset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -528,9 +469,6 @@ def _preset_to_nested(preset: Preset) -> PresetNested: is_incomplete=preset.is_incomplete, provenance_type=preset.provenance_type, home_id=preset.home_id, - depth=preset.depth, - immediate_upstream=preset.immediate_upstream, - immediate_downstream=preset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -560,6 +498,7 @@ def _preset_from_nested(nested: PresetNested) -> Preset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -568,9 +507,6 @@ def _preset_from_nested(nested: PresetNested) -> Preset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/preset_chart.py b/pyatlan_v9/model/assets/preset_chart.py index 0b8198afd..188aaba5b 100644 --- a/pyatlan_v9/model/assets/preset_chart.py +++ b/pyatlan_v9/model/assets/preset_chart.py @@ -45,7 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetChart, RelatedPresetDashboard +from 
.preset_related import RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -100,6 +100,8 @@ class PresetChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetChart" + preset_chart_description_markdown: Union[str, None, UnsetType] = UNSET """""" @@ -224,80 +226,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_dashboard is UNSET: - errors.append("preset_dashboard is required for creation") - if self.preset_dashboard_qualified_name is UNSET: - errors.append( - "preset_dashboard_qualified_name is required for creation" - ) - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetChart validation failed: {errors}") - - def minimize(self) -> "PresetChart": - """ - Return a minimal copy of this PresetChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetChart instance with only the minimum required fields. - """ - self.validate() - return PresetChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetChart": - """ - Create a :class:`RelatedPresetChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetChart reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetChart(guid=self.guid) - return RelatedPresetChart(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -611,9 +539,6 @@ def _preset_chart_to_nested(preset_chart: PresetChart) -> PresetChartNested: is_incomplete=preset_chart.is_incomplete, provenance_type=preset_chart.provenance_type, home_id=preset_chart.home_id, - depth=preset_chart.depth, - immediate_upstream=preset_chart.immediate_upstream, - immediate_downstream=preset_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -645,6 +570,7 @@ def _preset_chart_from_nested(nested: PresetChartNested) -> PresetChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -653,9 +579,6 @@ def _preset_chart_from_nested(nested: PresetChartNested) -> PresetChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_chart_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/preset_dashboard.py b/pyatlan_v9/model/assets/preset_dashboard.py index f05894658..c98af870b 100644 --- a/pyatlan_v9/model/assets/preset_dashboard.py +++ b/pyatlan_v9/model/assets/preset_dashboard.py @@ -48,7 +48,6 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .preset_related import ( RelatedPresetChart, - RelatedPresetDashboard, RelatedPresetDataset, RelatedPresetWorkspace, ) @@ -112,6 +111,8 @@ class PresetDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: 
Union[str, UnsetType] = "PresetDashboard" + preset_dashboard_changed_by_name: Union[str, None, UnsetType] = UNSET """""" @@ -256,76 +257,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_workspace is UNSET: - errors.append("preset_workspace is required for creation") - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetDashboard validation failed: {errors}") - - def minimize(self) -> "PresetDashboard": - """ - Return a minimal copy of this PresetDashboard with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetDashboard instance with only the minimum required fields. - """ - self.validate() - return PresetDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetDashboard": - """ - Create a :class:`RelatedPresetDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPresetDashboard(guid=self.guid) - return RelatedPresetDashboard(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -681,9 +612,6 @@ def _preset_dashboard_to_nested( is_incomplete=preset_dashboard.is_incomplete, provenance_type=preset_dashboard.provenance_type, home_id=preset_dashboard.home_id, - depth=preset_dashboard.depth, - immediate_upstream=preset_dashboard.immediate_upstream, - immediate_downstream=preset_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +645,7 @@ def _preset_dashboard_from_nested(nested: PresetDashboardNested) -> PresetDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +654,6 @@ def _preset_dashboard_from_nested(nested: PresetDashboardNested) -> PresetDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/preset_dataset.py b/pyatlan_v9/model/assets/preset_dataset.py index 97d042997..ff8ccff72 100644 --- a/pyatlan_v9/model/assets/preset_dataset.py +++ b/pyatlan_v9/model/assets/preset_dataset.py @@ -45,7 +45,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetDashboard, RelatedPresetDataset +from .preset_related import RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -101,6 +101,8 @@ class PresetDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetDataset" + preset_dataset_datasource_name: Union[str, None, UnsetType] = UNSET """""" @@ -228,80 +230,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_dashboard is UNSET: - errors.append("preset_dashboard is required for creation") - if self.preset_dashboard_qualified_name is UNSET: - errors.append( - "preset_dashboard_qualified_name is required for creation" - ) - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetDataset validation failed: {errors}") - - def minimize(self) -> "PresetDataset": - """ - Return a minimal copy of this PresetDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetDataset instance with only the minimum required fields. - """ - self.validate() - return PresetDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetDataset": - """ - Create a :class:`RelatedPresetDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetDataset(guid=self.guid) - return RelatedPresetDataset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -620,9 +548,6 @@ def _preset_dataset_to_nested(preset_dataset: PresetDataset) -> PresetDatasetNes is_incomplete=preset_dataset.is_incomplete, provenance_type=preset_dataset.provenance_type, home_id=preset_dataset.home_id, - depth=preset_dataset.depth, - immediate_upstream=preset_dataset.immediate_upstream, - immediate_downstream=preset_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +581,7 @@ def _preset_dataset_from_nested(nested: PresetDatasetNested) -> PresetDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +590,6 @@ def _preset_dataset_from_nested(nested: PresetDatasetNested) -> PresetDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/preset_workspace.py b/pyatlan_v9/model/assets/preset_workspace.py index c396a2c39..c407e3ca6 100644 --- a/pyatlan_v9/model/assets/preset_workspace.py +++ b/pyatlan_v9/model/assets/preset_workspace.py @@ -44,7 +44,7 @@ from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetDashboard, RelatedPresetWorkspace +from .preset_related import 
RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -106,6 +106,8 @@ class PresetWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetWorkspace" + preset_workspace_public_dashboards_allowed: Union[bool, None, UnsetType] = UNSET """""" @@ -243,66 +245,6 @@ class PresetWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "PresetWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PresetWorkspace validation failed: {errors}") - - def minimize(self) -> "PresetWorkspace": - """ - Return a minimal copy of this PresetWorkspace with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetWorkspace instance with only the minimum required fields. - """ - self.validate() - return PresetWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetWorkspace": - """ - Create a :class:`RelatedPresetWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetWorkspace reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPresetWorkspace(guid=self.guid) - return RelatedPresetWorkspace(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -665,9 +607,6 @@ def _preset_workspace_to_nested( is_incomplete=preset_workspace.is_incomplete, provenance_type=preset_workspace.provenance_type, home_id=preset_workspace.home_id, - depth=preset_workspace.depth, - immediate_upstream=preset_workspace.immediate_upstream, - immediate_downstream=preset_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -701,6 +640,7 @@ def _preset_workspace_from_nested(nested: PresetWorkspaceNested) -> PresetWorksp updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -709,9 +649,6 @@ def _preset_workspace_from_nested(nested: PresetWorkspaceNested) -> PresetWorksp is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/procedure.py b/pyatlan_v9/model/assets/procedure.py index a37940343..54f468713 100644 --- a/pyatlan_v9/model/assets/procedure.py +++ b/pyatlan_v9/model/assets/procedure.py @@ -59,7 +59,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedProcedure, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -140,6 +140,8 @@ class Procedure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Procedure" + definition: Union[str, None, UnsetType] = UNSET """SQL definition of the procedure.""" @@ -366,82 +368,6 @@ def __post_init__(self) -> None: r"^.+/_procedures_/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Procedure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if self.definition is UNSET: - errors.append("definition is required for creation") - if errors: - raise ValueError(f"Procedure validation failed: {errors}") - - def minimize(self) -> "Procedure": - """ - Return a minimal copy of this Procedure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Procedure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Procedure instance with only the minimum required fields. - """ - self.validate() - return Procedure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcedure": - """ - Create a :class:`RelatedProcedure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcedure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedProcedure(guid=self.guid) - return RelatedProcedure(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -961,9 +887,6 @@ def _procedure_to_nested(procedure: Procedure) -> ProcedureNested: is_incomplete=procedure.is_incomplete, provenance_type=procedure.provenance_type, home_id=procedure.home_id, - depth=procedure.depth, - immediate_upstream=procedure.immediate_upstream, - immediate_downstream=procedure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -995,6 +918,7 @@ def _procedure_from_nested(nested: ProcedureNested) -> Procedure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1003,9 +927,6 @@ def _procedure_from_nested(nested: ProcedureNested) -> Procedure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_procedure_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/process.py b/pyatlan_v9/model/assets/process.py index 8d77e6631..bfcbdc674 100644 --- a/pyatlan_v9/model/assets/process.py +++ b/pyatlan_v9/model/assets/process.py @@ -52,7 +52,7 @@ from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .power_bi_related import RelatedPowerBIDataflow -from .process_related import RelatedColumnProcess, RelatedProcess +from .process_related import RelatedColumnProcess from 
.referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -109,6 +109,8 @@ class Process(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Process" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -231,66 +233,6 @@ class Process(Asset): def __post_init__(self) -> None: self.type_name = "Process" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Process instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Process validation failed: {errors}") - - def minimize(self) -> "Process": - """ - Return a minimal copy of this Process with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Process with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Process instance with only the minimum required fields. - """ - self.validate() - return Process(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcess": - """ - Create a :class:`RelatedProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcess reference to this asset. - """ - if self.guid is not UNSET: - return RelatedProcess(guid=self.guid) - return RelatedProcess(qualified_name=self.qualified_name) - @staticmethod def _extract_guid(relationship: Any) -> Union[str, None]: """Extract guid from a relationship-like object.""" @@ -702,9 +644,6 @@ def _process_to_nested(process: Process) -> ProcessNested: is_incomplete=process.is_incomplete, provenance_type=process.provenance_type, home_id=process.home_id, - depth=process.depth, - immediate_upstream=process.immediate_upstream, - immediate_downstream=process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -734,6 +673,7 @@ def _process_from_nested(nested: ProcessNested) -> Process: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -742,9 +682,6 @@ def _process_from_nested(nested: ProcessNested) -> Process: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_process_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/process_execution.py b/pyatlan_v9/model/assets/process_execution.py index 4e83d5aa1..da966a369 100644 --- a/pyatlan_v9/model/assets/process_execution.py +++ b/pyatlan_v9/model/assets/process_execution.py @@ -27,7 +27,6 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedProcessExecution from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -72,6 +71,7 @@ class ProcessExecution(Referenceable): ANNOUNCEMENT_TYPE: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_AT: ClassVar[Any] = None ANNOUNCEMENT_UPDATED_BY: ClassVar[Any] = None + ASSET_ANNOUNCEMENT_EXPIRED_AT: ClassVar[Any] = None OWNER_USERS: ClassVar[Any] = None OWNER_GROUPS: ClassVar[Any] = None ADMIN_USERS: ClassVar[Any] = None @@ -261,6 +261,8 @@ class ProcessExecution(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ProcessExecution" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -315,6 +317,9 @@ class ProcessExecution(Referenceable): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -988,66 +993,6 @@ class ProcessExecution(Referenceable): def __post_init__(self) -> None: self.type_name = "ProcessExecution" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ProcessExecution instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ProcessExecution validation failed: {errors}") - - def minimize(self) -> "ProcessExecution": - """ - Return a minimal copy of this ProcessExecution with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ProcessExecution with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ProcessExecution instance with only the minimum required fields. 
- """ - self.validate() - return ProcessExecution(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcessExecution": - """ - Create a :class:`RelatedProcessExecution` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcessExecution reference to this asset. - """ - if self.guid is not UNSET: - return RelatedProcessExecution(guid=self.guid) - return RelatedProcessExecution(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1159,6 +1104,9 @@ class ProcessExecutionAttributes(ReferenceableAttributes): announcement_updated_by: Union[str, None, UnsetType] = UNSET """Name of the user who last updated the announcement.""" + asset_announcement_expired_at: Union[int, None, UnsetType] = UNSET + """Time (epoch) at which the announcement expires, in milliseconds. 
When set, the announcement will no longer be displayed after this time.""" + owner_users: Union[Set[str], None, UnsetType] = UNSET """List of users who own this asset.""" @@ -1903,6 +1851,7 @@ def _populate_process_execution_attrs( attrs.announcement_type = obj.announcement_type attrs.announcement_updated_at = obj.announcement_updated_at attrs.announcement_updated_by = obj.announcement_updated_by + attrs.asset_announcement_expired_at = obj.asset_announcement_expired_at attrs.owner_users = obj.owner_users attrs.owner_groups = obj.owner_groups attrs.admin_users = obj.admin_users @@ -2148,6 +2097,7 @@ def _extract_process_execution_attrs(attrs: ProcessExecutionAttributes) -> dict: result["announcement_type"] = attrs.announcement_type result["announcement_updated_at"] = attrs.announcement_updated_at result["announcement_updated_by"] = attrs.announcement_updated_by + result["asset_announcement_expired_at"] = attrs.asset_announcement_expired_at result["owner_users"] = attrs.owner_users result["owner_groups"] = attrs.owner_groups result["admin_users"] = attrs.admin_users @@ -2440,9 +2390,6 @@ def _process_execution_to_nested( is_incomplete=process_execution.is_incomplete, provenance_type=process_execution.provenance_type, home_id=process_execution.home_id, - depth=process_execution.depth, - immediate_upstream=process_execution.immediate_upstream, - immediate_downstream=process_execution.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2476,6 +2423,7 @@ def _process_execution_from_nested(nested: ProcessExecutionNested) -> ProcessExe updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2484,9 +2432,6 @@ def _process_execution_from_nested(nested: ProcessExecutionNested) -> ProcessExe 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_process_execution_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2563,6 +2508,9 @@ def _process_execution_from_nested_bytes(data: bytes, serde: Serde) -> ProcessEx ProcessExecution.ANNOUNCEMENT_UPDATED_BY = KeywordField( "announcementUpdatedBy", "announcementUpdatedBy" ) +ProcessExecution.ASSET_ANNOUNCEMENT_EXPIRED_AT = NumericField( + "assetAnnouncementExpiredAt", "assetAnnouncementExpiredAt" +) ProcessExecution.OWNER_USERS = KeywordField("ownerUsers", "ownerUsers") ProcessExecution.OWNER_GROUPS = KeywordField("ownerGroups", "ownerGroups") ProcessExecution.ADMIN_USERS = KeywordField("adminUsers", "adminUsers") diff --git a/pyatlan_v9/model/assets/qlik.py b/pyatlan_v9/model/assets/qlik.py index ff9314832..2bd7da54a 100644 --- a/pyatlan_v9/model/assets/qlik.py +++ b/pyatlan_v9/model/assets/qlik.py @@ -45,7 +45,6 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlik from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -100,6 +99,8 @@ class Qlik(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Qlik" + qlik_id: Union[str, None, UnsetType] = UNSET """Identifier of this asset, from Qlik.""" @@ -219,66 +220,6 @@ class Qlik(Asset): def __post_init__(self) -> None: self.type_name = "Qlik" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Qlik instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Qlik validation failed: {errors}") - - def minimize(self) -> "Qlik": - """ - Return a minimal copy of this Qlik with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Qlik with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Qlik instance with only the minimum required fields. - """ - self.validate() - return Qlik(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlik": - """ - Create a :class:`RelatedQlik` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlik reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlik(guid=self.guid) - return RelatedQlik(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -561,9 +502,6 @@ def _qlik_to_nested(qlik: Qlik) -> QlikNested: is_incomplete=qlik.is_incomplete, provenance_type=qlik.provenance_type, home_id=qlik.home_id, - depth=qlik.depth, - immediate_upstream=qlik.immediate_upstream, - immediate_downstream=qlik.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -593,6 +531,7 @@ def _qlik_from_nested(nested: QlikNested) -> Qlik: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -601,9 +540,6 @@ def _qlik_from_nested(nested: QlikNested) -> Qlik: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/qlik_app.py b/pyatlan_v9/model/assets/qlik_app.py index ee119e1d9..f0c0337a9 100644 --- a/pyatlan_v9/model/assets/qlik_app.py +++ b/pyatlan_v9/model/assets/qlik_app.py @@ -46,7 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikApp, RelatedQlikSheet, RelatedQlikSpace +from .qlik_related import RelatedQlikSheet, RelatedQlikSpace from 
.referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -108,6 +108,8 @@ class QlikApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikApp" + qlik_has_section_access: Union[bool, None, UnsetType] = UNSET """Whether section access or data masking is enabled on the source (true) or not (false).""" @@ -254,74 +256,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_space is UNSET: - errors.append("qlik_space is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikApp validation failed: {errors}") - - def minimize(self) -> "QlikApp": - """ - Return a minimal copy of this QlikApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikApp instance with only the minimum required fields. - """ - self.validate() - return QlikApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikApp": - """ - Create a :class:`RelatedQlikApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikApp(guid=self.guid) - return RelatedQlikApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -641,9 +575,6 @@ def _qlik_app_to_nested(qlik_app: QlikApp) -> QlikAppNested: is_incomplete=qlik_app.is_incomplete, provenance_type=qlik_app.provenance_type, home_id=qlik_app.home_id, - depth=qlik_app.depth, - immediate_upstream=qlik_app.immediate_upstream, - immediate_downstream=qlik_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -673,6 +604,7 @@ def _qlik_app_from_nested(nested: QlikAppNested) -> QlikApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -681,9 +613,6 @@ def _qlik_app_from_nested(nested: QlikAppNested) -> QlikApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_app_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/qlik_chart.py b/pyatlan_v9/model/assets/qlik_chart.py index 3d082c25c..2ca336487 100644 --- a/pyatlan_v9/model/assets/qlik_chart.py +++ b/pyatlan_v9/model/assets/qlik_chart.py @@ -46,7 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikChart, RelatedQlikColumn, RelatedQlikSheet +from 
.qlik_related import RelatedQlikColumn, RelatedQlikSheet from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -66,8 +66,8 @@ class QlikChart(Asset): QLIK_CHART_SUBTITLE: ClassVar[Any] = None QLIK_CHART_FOOTNOTE: ClassVar[Any] = None - QLIK_CHART_ORIENTATION: ClassVar[Any] = None - QLIK_CHART_TYPE: ClassVar[Any] = None + QLIK_ORIENTATION: ClassVar[Any] = None + QLIK_TYPE: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -107,16 +107,18 @@ class QlikChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikChart" + qlik_chart_subtitle: Union[str, None, UnsetType] = UNSET """Subtitle of this chart.""" qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -252,76 +254,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_sheet is UNSET: - errors.append("qlik_sheet is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikChart validation failed: {errors}") - - def minimize(self) -> "QlikChart": - """ - Return a minimal copy of this QlikChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikChart instance with only the minimum required fields. - """ - self.validate() - return QlikChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikChart": - """ - Create a :class:`RelatedQlikChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikChart reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikChart(guid=self.guid) - return RelatedQlikChart(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -383,10 +315,10 @@ class QlikChartAttributes(AssetAttributes): qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -573,8 +505,8 @@ def _populate_qlik_chart_attrs(attrs: QlikChartAttributes, obj: QlikChart) -> No _populate_asset_attrs(attrs, obj) attrs.qlik_chart_subtitle = obj.qlik_chart_subtitle attrs.qlik_chart_footnote = obj.qlik_chart_footnote - attrs.qlik_chart_orientation = obj.qlik_chart_orientation - attrs.qlik_chart_type = obj.qlik_chart_type + attrs.qlik_orientation = obj.qlik_orientation + attrs.qlik_type = obj.qlik_type attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -590,8 +522,8 @@ def _extract_qlik_chart_attrs(attrs: QlikChartAttributes) -> dict: result = _extract_asset_attrs(attrs) result["qlik_chart_subtitle"] = attrs.qlik_chart_subtitle result["qlik_chart_footnote"] = attrs.qlik_chart_footnote - result["qlik_chart_orientation"] = attrs.qlik_chart_orientation - result["qlik_chart_type"] = attrs.qlik_chart_type + result["qlik_orientation"] = attrs.qlik_orientation + result["qlik_type"] = attrs.qlik_type result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -636,9 +568,6 @@ def 
_qlik_chart_to_nested(qlik_chart: QlikChart) -> QlikChartNested: is_incomplete=qlik_chart.is_incomplete, provenance_type=qlik_chart.provenance_type, home_id=qlik_chart.home_id, - depth=qlik_chart.depth, - immediate_upstream=qlik_chart.immediate_upstream, - immediate_downstream=qlik_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -670,6 +599,7 @@ def _qlik_chart_from_nested(nested: QlikChartNested) -> QlikChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -678,9 +608,6 @@ def _qlik_chart_from_nested(nested: QlikChartNested) -> QlikChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_chart_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -710,10 +637,8 @@ def _qlik_chart_from_nested_bytes(data: bytes, serde: Serde) -> QlikChart: QlikChart.QLIK_CHART_SUBTITLE = KeywordField("qlikChartSubtitle", "qlikChartSubtitle") QlikChart.QLIK_CHART_FOOTNOTE = KeywordField("qlikChartFootnote", "qlikChartFootnote") -QlikChart.QLIK_CHART_ORIENTATION = KeywordField( - "qlikChartOrientation", "qlikChartOrientation" -) -QlikChart.QLIK_CHART_TYPE = KeywordField("qlikChartType", "qlikChartType") +QlikChart.QLIK_ORIENTATION = KeywordField("qlikOrientation", "qlikOrientation") +QlikChart.QLIK_TYPE = KeywordField("qlikType", "qlikType") QlikChart.QLIK_ID = KeywordField("qlikId", "qlikId") QlikChart.QLIK_QRI = KeywordTextField("qlikQRI", "qlikQRI", "qlikQRI.text") QlikChart.QLIK_SPACE_ID = KeywordField("qlikSpaceId", "qlikSpaceId") diff --git a/pyatlan_v9/model/assets/qlik_column.py 
b/pyatlan_v9/model/assets/qlik_column.py index 5c484c14c..94fcfd8be 100644 --- a/pyatlan_v9/model/assets/qlik_column.py +++ b/pyatlan_v9/model/assets/qlik_column.py @@ -46,12 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import ( - RelatedQlikChart, - RelatedQlikColumn, - RelatedQlikDataset, - RelatedQlikSheet, -) +from .qlik_related import RelatedQlikChart, RelatedQlikDataset, RelatedQlikSheet from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -113,6 +108,8 @@ class QlikColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikColumn" + qlik_column_name: Union[str, None, UnsetType] = UNSET """Qlik Column name.""" @@ -261,76 +258,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_sheet is UNSET: - errors.append("qlik_sheet is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikColumn validation failed: {errors}") - - def minimize(self) -> "QlikColumn": - """ - Return a minimal copy of this QlikColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikColumn instance with only the minimum required fields. - """ - self.validate() - return QlikColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikColumn": - """ - Create a :class:`RelatedQlikColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikColumn(guid=self.guid) - return RelatedQlikColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -649,9 +576,6 @@ def _qlik_column_to_nested(qlik_column: QlikColumn) -> QlikColumnNested: is_incomplete=qlik_column.is_incomplete, provenance_type=qlik_column.provenance_type, home_id=qlik_column.home_id, - depth=qlik_column.depth, - immediate_upstream=qlik_column.immediate_upstream, - immediate_downstream=qlik_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -683,6 +607,7 @@ def _qlik_column_from_nested(nested: QlikColumnNested) -> QlikColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -691,9 +616,6 @@ def _qlik_column_from_nested(nested: QlikColumnNested) -> QlikColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/qlik_dataset.py b/pyatlan_v9/model/assets/qlik_dataset.py index 19ef2a4c3..2f385b960 100644 --- a/pyatlan_v9/model/assets/qlik_dataset.py +++ b/pyatlan_v9/model/assets/qlik_dataset.py @@ -46,7 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related 
import RelatedQlikColumn, RelatedQlikDataset, RelatedQlikSpace +from .qlik_related import RelatedQlikColumn, RelatedQlikSpace from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -108,6 +108,8 @@ class QlikDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikDataset" + qlik_dataset_technical_name: Union[str, None, UnsetType] = UNSET """Technical name of this asset.""" @@ -254,74 +256,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_space is UNSET: - errors.append("qlik_space is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikDataset validation failed: {errors}") - - def minimize(self) -> "QlikDataset": - """ - Return a minimal copy of this QlikDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikDataset instance with only the minimum required fields. - """ - self.validate() - return QlikDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikDataset": - """ - Create a :class:`RelatedQlikDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikDataset(guid=self.guid) - return RelatedQlikDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -643,9 +577,6 @@ def _qlik_dataset_to_nested(qlik_dataset: QlikDataset) -> QlikDatasetNested: is_incomplete=qlik_dataset.is_incomplete, provenance_type=qlik_dataset.provenance_type, home_id=qlik_dataset.home_id, - depth=qlik_dataset.depth, - immediate_upstream=qlik_dataset.immediate_upstream, - immediate_downstream=qlik_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -677,6 +608,7 @@ def _qlik_dataset_from_nested(nested: QlikDatasetNested) -> QlikDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -685,9 +617,6 @@ def _qlik_dataset_from_nested(nested: QlikDatasetNested) -> QlikDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/qlik_related.py b/pyatlan_v9/model/assets/qlik_related.py index c22f845a3..d84c99c01 100644 --- a/pyatlan_v9/model/assets/qlik_related.py +++ b/pyatlan_v9/model/assets/qlik_related.py @@ -86,10 +86,10 @@ class RelatedQlikChart(RelatedQlik): qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + 
qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" def __post_init__(self) -> None: @@ -107,7 +107,7 @@ class RelatedQlikSheet(RelatedQlik): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QlikSheet" so it serializes correctly - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" def __post_init__(self) -> None: @@ -125,7 +125,7 @@ class RelatedQlikSpace(RelatedQlik): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QlikSpace" so it serializes correctly - qlik_space_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/qlik_sheet.py b/pyatlan_v9/model/assets/qlik_sheet.py index d7af2821d..215752d23 100644 --- a/pyatlan_v9/model/assets/qlik_sheet.py +++ b/pyatlan_v9/model/assets/qlik_sheet.py @@ -46,12 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import ( - RelatedQlikApp, - RelatedQlikChart, - RelatedQlikColumn, - RelatedQlikSheet, -) +from .qlik_related import RelatedQlikApp, RelatedQlikChart, RelatedQlikColumn from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -69,7 +64,7 @@ class QlikSheet(Asset): Instance of a Qlik sheet in Atlan. 
""" - QLIK_SHEET_IS_APPROVED: ClassVar[Any] = None + QLIK_IS_APPROVED: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -110,7 +105,9 @@ class QlikSheet(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QlikSheet" + + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -249,76 +246,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikSheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_app is UNSET: - errors.append("qlik_app is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikSheet validation failed: {errors}") - - def minimize(self) -> "QlikSheet": - """ - Return a minimal copy of this QlikSheet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikSheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikSheet instance with only the minimum required fields. - """ - self.validate() - return QlikSheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikSheet": - """ - Create a :class:`RelatedQlikSheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikSheet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikSheet(guid=self.guid) - return RelatedQlikSheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -374,7 +301,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> QlikSheet: class QlikSheetAttributes(AssetAttributes): """QlikSheet-specific attributes for nested API format.""" - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -563,7 +490,7 @@ class QlikSheetNested(AssetNested): def _populate_qlik_sheet_attrs(attrs: QlikSheetAttributes, obj: QlikSheet) -> None: """Populate QlikSheet-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.qlik_sheet_is_approved = obj.qlik_sheet_is_approved + attrs.qlik_is_approved = obj.qlik_is_approved attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -577,7 +504,7 @@ def _populate_qlik_sheet_attrs(attrs: QlikSheetAttributes, obj: QlikSheet) -> No def _extract_qlik_sheet_attrs(attrs: QlikSheetAttributes) -> dict: """Extract all QlikSheet attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["qlik_sheet_is_approved"] = attrs.qlik_sheet_is_approved + result["qlik_is_approved"] = attrs.qlik_is_approved result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -622,9 +549,6 @@ def _qlik_sheet_to_nested(qlik_sheet: QlikSheet) -> QlikSheetNested: is_incomplete=qlik_sheet.is_incomplete, provenance_type=qlik_sheet.provenance_type, home_id=qlik_sheet.home_id, - depth=qlik_sheet.depth, - 
immediate_upstream=qlik_sheet.immediate_upstream, - immediate_downstream=qlik_sheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +580,7 @@ def _qlik_sheet_from_nested(nested: QlikSheetNested) -> QlikSheet: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +589,6 @@ def _qlik_sheet_from_nested(nested: QlikSheetNested) -> QlikSheet: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_sheet_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -694,9 +616,7 @@ def _qlik_sheet_from_nested_bytes(data: bytes, serde: Serde) -> QlikSheet: RelationField, ) -QlikSheet.QLIK_SHEET_IS_APPROVED = BooleanField( - "qlikSheetIsApproved", "qlikSheetIsApproved" -) +QlikSheet.QLIK_IS_APPROVED = BooleanField("qlikIsApproved", "qlikIsApproved") QlikSheet.QLIK_ID = KeywordField("qlikId", "qlikId") QlikSheet.QLIK_QRI = KeywordTextField("qlikQRI", "qlikQRI", "qlikQRI.text") QlikSheet.QLIK_SPACE_ID = KeywordField("qlikSpaceId", "qlikSpaceId") diff --git a/pyatlan_v9/model/assets/qlik_space.py b/pyatlan_v9/model/assets/qlik_space.py index 170ae6f63..478cd871b 100644 --- a/pyatlan_v9/model/assets/qlik_space.py +++ b/pyatlan_v9/model/assets/qlik_space.py @@ -45,7 +45,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikApp, RelatedQlikDataset, RelatedQlikSpace +from .qlik_related import RelatedQlikApp, RelatedQlikDataset from 
.referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -63,7 +63,7 @@ class QlikSpace(Asset): Instance of a Qlik space in Atlan. """ - QLIK_SPACE_TYPE: ClassVar[Any] = None + QLIK_TYPE: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -103,7 +103,9 @@ class QlikSpace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - qlik_space_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QlikSpace" + + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -231,66 +233,6 @@ class QlikSpace(Asset): def __post_init__(self) -> None: self.type_name = "QlikSpace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikSpace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QlikSpace validation failed: {errors}") - - def minimize(self) -> "QlikSpace": - """ - Return a minimal copy of this QlikSpace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikSpace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikSpace instance with only the minimum required fields. - """ - self.validate() - return QlikSpace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikSpace": - """ - Create a :class:`RelatedQlikSpace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikSpace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikSpace(guid=self.guid) - return RelatedQlikSpace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -346,7 +288,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> QlikSpace: class QlikSpaceAttributes(AssetAttributes): """QlikSpace-specific attributes for nested API format.""" - qlik_space_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -531,7 +473,7 @@ class QlikSpaceNested(AssetNested): def _populate_qlik_space_attrs(attrs: QlikSpaceAttributes, obj: QlikSpace) -> None: """Populate QlikSpace-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.qlik_space_type = obj.qlik_space_type + attrs.qlik_type = obj.qlik_type attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -545,7 +487,7 @@ def _populate_qlik_space_attrs(attrs: QlikSpaceAttributes, obj: QlikSpace) -> No def _extract_qlik_space_attrs(attrs: QlikSpaceAttributes) -> dict: """Extract all QlikSpace attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["qlik_space_type"] = attrs.qlik_space_type + result["qlik_type"] = attrs.qlik_type result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -590,9 +532,6 @@ def _qlik_space_to_nested(qlik_space: QlikSpace) -> QlikSpaceNested: is_incomplete=qlik_space.is_incomplete, provenance_type=qlik_space.provenance_type, home_id=qlik_space.home_id, - depth=qlik_space.depth, - immediate_upstream=qlik_space.immediate_upstream, - 
immediate_downstream=qlik_space.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +563,7 @@ def _qlik_space_from_nested(nested: QlikSpaceNested) -> QlikSpace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +572,6 @@ def _qlik_space_from_nested(nested: QlikSpaceNested) -> QlikSpace: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_space_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -662,7 +599,7 @@ def _qlik_space_from_nested_bytes(data: bytes, serde: Serde) -> QlikSpace: RelationField, ) -QlikSpace.QLIK_SPACE_TYPE = KeywordField("qlikSpaceType", "qlikSpaceType") +QlikSpace.QLIK_TYPE = KeywordField("qlikType", "qlikType") QlikSpace.QLIK_ID = KeywordField("qlikId", "qlikId") QlikSpace.QLIK_QRI = KeywordTextField("qlikQRI", "qlikQRI", "qlikQRI.text") QlikSpace.QLIK_SPACE_ID = KeywordField("qlikSpaceId", "qlikSpaceId") diff --git a/pyatlan_v9/model/assets/query.py b/pyatlan_v9/model/assets/query.py index b483908da..82df7823c 100644 --- a/pyatlan_v9/model/assets/query.py +++ b/pyatlan_v9/model/assets/query.py @@ -61,7 +61,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedColumn, RelatedQuery, RelatedTable, RelatedView +from .sql_related import RelatedColumn, RelatedTable, RelatedView # ============================================================================= # FLAT ASSET CLASS @@ -144,6 +144,8 @@ class Query(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Query" + raw_query: Union[str, None, UnsetType] = UNSET """Deprecated. See 'longRawQuery' instead.""" @@ -374,72 +376,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Query instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent is UNSET: - errors.append("parent is required for creation") - if errors: - raise ValueError(f"Query validation failed: {errors}") - - def minimize(self) -> "Query": - """ - Return a minimal copy of this Query with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Query with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Query instance with only the minimum required fields. - """ - self.validate() - return Query(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuery": - """ - Create a :class:`RelatedQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuery reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuery(guid=self.guid) - return RelatedQuery(qualified_name=self.qualified_name) - @classmethod def creator( cls, @@ -991,9 +927,6 @@ def _query_to_nested(query: Query) -> QueryNested: is_incomplete=query.is_incomplete, provenance_type=query.provenance_type, home_id=query.home_id, - depth=query.depth, - immediate_upstream=query.immediate_upstream, - immediate_downstream=query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1023,6 +956,7 @@ def _query_from_nested(nested: QueryNested) -> Query: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1031,9 +965,6 @@ def _query_from_nested(nested: QueryNested) -> Query: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_query_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/quick_sight.py b/pyatlan_v9/model/assets/quick_sight.py index 9aaea2f45..b7ead6c4d 100644 --- a/pyatlan_v9/model/assets/quick_sight.py +++ b/pyatlan_v9/model/assets/quick_sight.py @@ -44,7 +44,6 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import RelatedQuickSight from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -94,6 +93,8 @@ class QuickSight(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QuickSight" + quick_sight_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the QuickSight asset.""" @@ -198,66 +199,6 @@ class QuickSight(Asset): def __post_init__(self) -> None: self.type_name = "QuickSight" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSight instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QuickSight validation failed: {errors}") - - def minimize(self) -> "QuickSight": - """ - Return a minimal copy of this QuickSight with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSight with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSight instance with only the minimum required fields. - """ - self.validate() - return QuickSight(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSight": - """ - Create a :class:`RelatedQuickSight` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSight reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSight(guid=self.guid) - return RelatedQuickSight(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -519,9 +460,6 @@ def _quick_sight_to_nested(quick_sight: QuickSight) -> QuickSightNested: is_incomplete=quick_sight.is_incomplete, provenance_type=quick_sight.provenance_type, home_id=quick_sight.home_id, - depth=quick_sight.depth, - immediate_upstream=quick_sight.immediate_upstream, - immediate_downstream=quick_sight.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -553,6 +491,7 @@ def _quick_sight_from_nested(nested: QuickSightNested) -> QuickSight: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -561,9 +500,6 @@ def _quick_sight_from_nested(nested: QuickSightNested) -> QuickSight: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/quick_sight_analysis.py b/pyatlan_v9/model/assets/quick_sight_analysis.py index 18e90c3fc..06a191928 100644 --- a/pyatlan_v9/model/assets/quick_sight_analysis.py +++ b/pyatlan_v9/model/assets/quick_sight_analysis.py @@ -47,7 +47,6 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .quick_sight_related import ( - 
RelatedQuickSightAnalysis, RelatedQuickSightAnalysisVisual, RelatedQuickSightFolder, ) @@ -68,7 +67,7 @@ class QuickSightAnalysis(Asset): Instance of a QuickSight analysis in Atlan. In QuickSight, you analyze and visualize your data in analyses, which can be published as a dashboard to share with others. """ - QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[Any] = None + QUICK_SIGHT_STATUS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[Any] = None @@ -106,7 +105,9 @@ class QuickSightAnalysis(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightAnalysis" + + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -240,72 +241,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightAnalysis instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_analysis_folders is UNSET: - errors.append("quick_sight_analysis_folders is required for creation") - if errors: - raise ValueError(f"QuickSightAnalysis validation failed: {errors}") - - def minimize(self) -> "QuickSightAnalysis": - """ - Return a minimal copy of this QuickSightAnalysis with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightAnalysis with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightAnalysis instance with only the minimum required fields. - """ - self.validate() - return QuickSightAnalysis(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightAnalysis": - """ - Create a :class:`RelatedQuickSightAnalysis` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightAnalysis reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightAnalysis(guid=self.guid) - return RelatedQuickSightAnalysis(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -400,7 +335,7 @@ def from_json( class QuickSightAnalysisAttributes(AssetAttributes): """QuickSightAnalysis-specific attributes for nested API format.""" - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -589,7 +524,7 @@ def _populate_quick_sight_analysis_attrs( ) -> None: """Populate QuickSightAnalysis-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_analysis_status = obj.quick_sight_analysis_status + attrs.quick_sight_status = obj.quick_sight_status attrs.quick_sight_analysis_calculated_fields = ( obj.quick_sight_analysis_calculated_fields ) @@ -605,7 +540,7 @@ def _populate_quick_sight_analysis_attrs( def _extract_quick_sight_analysis_attrs(attrs: QuickSightAnalysisAttributes) -> dict: """Extract all QuickSightAnalysis attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_analysis_status"] = attrs.quick_sight_analysis_status + result["quick_sight_status"] = attrs.quick_sight_status result["quick_sight_analysis_calculated_fields"] = ( attrs.quick_sight_analysis_calculated_fields ) @@ -658,9 +593,6 @@ def _quick_sight_analysis_to_nested( is_incomplete=quick_sight_analysis.is_incomplete, provenance_type=quick_sight_analysis.provenance_type, home_id=quick_sight_analysis.home_id, - depth=quick_sight_analysis.depth, - immediate_upstream=quick_sight_analysis.immediate_upstream, - immediate_downstream=quick_sight_analysis.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -696,6 +628,7 @@ def _quick_sight_analysis_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -704,9 +637,6 @@ def _quick_sight_analysis_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_analysis_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -737,8 +667,8 @@ def _quick_sight_analysis_from_nested_bytes( RelationField, ) -QuickSightAnalysis.QUICK_SIGHT_ANALYSIS_STATUS = KeywordField( - "quickSightAnalysisStatus", "quickSightAnalysisStatus" +QuickSightAnalysis.QUICK_SIGHT_STATUS = KeywordField( + "quickSightStatus", "quickSightStatus" ) QuickSightAnalysis.QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS = KeywordField( "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" diff --git a/pyatlan_v9/model/assets/quick_sight_analysis_visual.py b/pyatlan_v9/model/assets/quick_sight_analysis_visual.py index ef7c7e6a0..679fcc614 100644 --- a/pyatlan_v9/model/assets/quick_sight_analysis_visual.py +++ b/pyatlan_v9/model/assets/quick_sight_analysis_visual.py @@ -46,10 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightAnalysis, - RelatedQuickSightAnalysisVisual, -) +from .quick_sight_related import RelatedQuickSightAnalysis from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import 
RelatedSchemaRegistrySubject @@ -101,6 +98,8 @@ class QuickSightAnalysisVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QuickSightAnalysisVisual" + quick_sight_analysis_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the QuickSight analysis in which this visual exists.""" @@ -219,78 +218,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightAnalysisVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_analysis is UNSET: - errors.append("quick_sight_analysis is required for creation") - if self.quick_sight_analysis_qualified_name is UNSET: - errors.append( - "quick_sight_analysis_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightAnalysisVisual validation failed: {errors}") - - def minimize(self) -> "QuickSightAnalysisVisual": - """ - Return a minimal copy of this QuickSightAnalysisVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightAnalysisVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightAnalysisVisual instance with only the minimum required fields. - """ - self.validate() - return QuickSightAnalysisVisual( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightAnalysisVisual": - """ - Create a :class:`RelatedQuickSightAnalysisVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightAnalysisVisual reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightAnalysisVisual(guid=self.guid) - return RelatedQuickSightAnalysisVisual(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -638,9 +565,6 @@ def _quick_sight_analysis_visual_to_nested( is_incomplete=quick_sight_analysis_visual.is_incomplete, provenance_type=quick_sight_analysis_visual.provenance_type, home_id=quick_sight_analysis_visual.home_id, - depth=quick_sight_analysis_visual.depth, - immediate_upstream=quick_sight_analysis_visual.immediate_upstream, - immediate_downstream=quick_sight_analysis_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -676,6 +600,7 @@ def _quick_sight_analysis_visual_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -684,9 +609,6 @@ def _quick_sight_analysis_visual_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_analysis_visual_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/quick_sight_dashboard.py b/pyatlan_v9/model/assets/quick_sight_dashboard.py index 1f3e1d1b8..00ac3eb53 100644 --- a/pyatlan_v9/model/assets/quick_sight_dashboard.py +++ b/pyatlan_v9/model/assets/quick_sight_dashboard.py @@ -47,7 +47,6 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .quick_sight_related import ( - RelatedQuickSightDashboard, RelatedQuickSightDashboardVisual, RelatedQuickSightFolder, ) @@ -68,8 +67,8 @@ class 
QuickSightDashboard(Asset): Instance of a QuickSight dashboard in Atlan. These are reports in QuickSight, created from analyses. """ - QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[Any] = None - QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[Any] = None + QUICK_SIGHT_PUBLISHED_VERSION_NUMBER: ClassVar[Any] = None + QUICK_SIGHT_LAST_PUBLISHED_TIME: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None @@ -104,10 +103,12 @@ class QuickSightDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDashboard" + + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -230,72 +231,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dashboard_folders is UNSET: - errors.append("quick_sight_dashboard_folders is required for creation") - if errors: - raise ValueError(f"QuickSightDashboard validation failed: {errors}") - - def minimize(self) -> "QuickSightDashboard": - """ - Return a minimal copy of this QuickSightDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDashboard instance with only the minimum required fields. - """ - self.validate() - return QuickSightDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightDashboard": - """ - Create a :class:`RelatedQuickSightDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightDashboard(guid=self.guid) - return RelatedQuickSightDashboard(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -399,10 +334,10 @@ def from_json( class QuickSightDashboardAttributes(AssetAttributes): """QuickSightDashboard-specific attributes for nested API format.""" - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -580,12 +515,10 @@ def _populate_quick_sight_dashboard_attrs( ) -> None: """Populate QuickSightDashboard-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dashboard_published_version_number = ( - obj.quick_sight_dashboard_published_version_number - ) - attrs.quick_sight_dashboard_last_published_time = ( - obj.quick_sight_dashboard_last_published_time + attrs.quick_sight_published_version_number = ( + obj.quick_sight_published_version_number ) + attrs.quick_sight_last_published_time = obj.quick_sight_last_published_time attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name @@ -594,12 +527,10 @@ def _populate_quick_sight_dashboard_attrs( def _extract_quick_sight_dashboard_attrs(attrs: QuickSightDashboardAttributes) -> dict: """Extract all QuickSightDashboard attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_dashboard_published_version_number"] = ( - attrs.quick_sight_dashboard_published_version_number - ) - 
result["quick_sight_dashboard_last_published_time"] = ( - attrs.quick_sight_dashboard_last_published_time + result["quick_sight_published_version_number"] = ( + attrs.quick_sight_published_version_number ) + result["quick_sight_last_published_time"] = attrs.quick_sight_last_published_time result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name @@ -643,9 +574,6 @@ def _quick_sight_dashboard_to_nested( is_incomplete=quick_sight_dashboard.is_incomplete, provenance_type=quick_sight_dashboard.provenance_type, home_id=quick_sight_dashboard.home_id, - depth=quick_sight_dashboard.depth, - immediate_upstream=quick_sight_dashboard.immediate_upstream, - immediate_downstream=quick_sight_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -681,6 +609,7 @@ def _quick_sight_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -689,9 +618,6 @@ def _quick_sight_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -723,12 +649,11 @@ def _quick_sight_dashboard_from_nested_bytes( RelationField, ) -QuickSightDashboard.QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER = NumericField( - "quickSightDashboardPublishedVersionNumber", - "quickSightDashboardPublishedVersionNumber", +QuickSightDashboard.QUICK_SIGHT_PUBLISHED_VERSION_NUMBER = NumericField( + 
"quickSightPublishedVersionNumber", "quickSightPublishedVersionNumber" ) -QuickSightDashboard.QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME = NumericField( - "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" +QuickSightDashboard.QUICK_SIGHT_LAST_PUBLISHED_TIME = NumericField( + "quickSightLastPublishedTime", "quickSightLastPublishedTime" ) QuickSightDashboard.QUICK_SIGHT_ID = KeywordField("quickSightId", "quickSightId") QuickSightDashboard.QUICK_SIGHT_SHEET_ID = KeywordField( diff --git a/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py b/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py index a8028d24c..36182dce7 100644 --- a/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py +++ b/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py @@ -46,10 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightDashboard, - RelatedQuickSightDashboardVisual, -) +from .quick_sight_related import RelatedQuickSightDashboard from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -101,6 +98,8 @@ class QuickSightDashboardVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QuickSightDashboardVisual" + quick_sight_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the dashboard in which this visual exists.""" @@ -219,78 +218,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDashboardVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dashboard is UNSET: - errors.append("quick_sight_dashboard is required for creation") - if self.quick_sight_dashboard_qualified_name is UNSET: - errors.append( - "quick_sight_dashboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightDashboardVisual validation failed: {errors}") - - def minimize(self) -> "QuickSightDashboardVisual": - """ - Return a minimal copy of this QuickSightDashboardVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDashboardVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDashboardVisual instance with only the minimum required fields. 
- """ - self.validate() - return QuickSightDashboardVisual( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightDashboardVisual": - """ - Create a :class:`RelatedQuickSightDashboardVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDashboardVisual reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightDashboardVisual(guid=self.guid) - return RelatedQuickSightDashboardVisual(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -640,9 +567,6 @@ def _quick_sight_dashboard_visual_to_nested( is_incomplete=quick_sight_dashboard_visual.is_incomplete, provenance_type=quick_sight_dashboard_visual.provenance_type, home_id=quick_sight_dashboard_visual.home_id, - depth=quick_sight_dashboard_visual.depth, - immediate_upstream=quick_sight_dashboard_visual.immediate_upstream, - immediate_downstream=quick_sight_dashboard_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -678,6 +602,7 @@ def _quick_sight_dashboard_visual_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -686,9 +611,6 @@ def _quick_sight_dashboard_visual_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dashboard_visual_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git 
a/pyatlan_v9/model/assets/quick_sight_dataset.py b/pyatlan_v9/model/assets/quick_sight_dataset.py index df12fd3f8..86cb8111c 100644 --- a/pyatlan_v9/model/assets/quick_sight_dataset.py +++ b/pyatlan_v9/model/assets/quick_sight_dataset.py @@ -46,11 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightDataset, - RelatedQuickSightDatasetField, - RelatedQuickSightFolder, -) +from .quick_sight_related import RelatedQuickSightDatasetField, RelatedQuickSightFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -68,8 +64,8 @@ class QuickSightDataset(Asset): Instance of a QuickSight dataset in Atlan. These are an internal data model built to be used by analysis. In a dataset, data can be pulled from different sources, joined, filtered, and columns translated to more business-friendly names when preparing the data for visualizing in the analysis layer. 
""" - QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[Any] = None - QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[Any] = None + QUICK_SIGHT_IMPORT_MODE: ClassVar[Any] = None + QUICK_SIGHT_COLUMN_COUNT: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None @@ -104,10 +100,12 @@ class QuickSightDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDataset" + + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -230,72 +228,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dataset_folders is UNSET: - errors.append("quick_sight_dataset_folders is required for creation") - if errors: - raise ValueError(f"QuickSightDataset validation failed: {errors}") - - def minimize(self) -> "QuickSightDataset": - """ - Return a minimal copy of this QuickSightDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDataset instance with only the minimum required fields. - """ - self.validate() - return QuickSightDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightDataset": - """ - Create a :class:`RelatedQuickSightDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightDataset(guid=self.guid) - return RelatedQuickSightDataset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -403,10 +335,10 @@ def from_json( class QuickSightDatasetAttributes(AssetAttributes): """QuickSightDataset-specific attributes for nested API format.""" - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -584,8 +516,8 @@ def _populate_quick_sight_dataset_attrs( ) -> None: """Populate QuickSightDataset-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dataset_import_mode = obj.quick_sight_dataset_import_mode - attrs.quick_sight_dataset_column_count = obj.quick_sight_dataset_column_count + attrs.quick_sight_import_mode = obj.quick_sight_import_mode + attrs.quick_sight_column_count = obj.quick_sight_column_count attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name @@ -594,8 +526,8 @@ def _populate_quick_sight_dataset_attrs( def _extract_quick_sight_dataset_attrs(attrs: QuickSightDatasetAttributes) -> dict: """Extract all QuickSightDataset attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_dataset_import_mode"] = attrs.quick_sight_dataset_import_mode - result["quick_sight_dataset_column_count"] = attrs.quick_sight_dataset_column_count + result["quick_sight_import_mode"] = attrs.quick_sight_import_mode + result["quick_sight_column_count"] = attrs.quick_sight_column_count 
result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name @@ -639,9 +571,6 @@ def _quick_sight_dataset_to_nested( is_incomplete=quick_sight_dataset.is_incomplete, provenance_type=quick_sight_dataset.provenance_type, home_id=quick_sight_dataset.home_id, - depth=quick_sight_dataset.depth, - immediate_upstream=quick_sight_dataset.immediate_upstream, - immediate_downstream=quick_sight_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -677,6 +606,7 @@ def _quick_sight_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -685,9 +615,6 @@ def _quick_sight_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -719,11 +646,11 @@ def _quick_sight_dataset_from_nested_bytes( RelationField, ) -QuickSightDataset.QUICK_SIGHT_DATASET_IMPORT_MODE = KeywordField( - "quickSightDatasetImportMode", "quickSightDatasetImportMode" +QuickSightDataset.QUICK_SIGHT_IMPORT_MODE = KeywordField( + "quickSightImportMode", "quickSightImportMode" ) -QuickSightDataset.QUICK_SIGHT_DATASET_COLUMN_COUNT = NumericField( - "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" +QuickSightDataset.QUICK_SIGHT_COLUMN_COUNT = NumericField( + "quickSightColumnCount", "quickSightColumnCount" ) QuickSightDataset.QUICK_SIGHT_ID = KeywordField("quickSightId", "quickSightId") 
QuickSightDataset.QUICK_SIGHT_SHEET_ID = KeywordField( diff --git a/pyatlan_v9/model/assets/quick_sight_dataset_field.py b/pyatlan_v9/model/assets/quick_sight_dataset_field.py index ea83668b5..07b2ad061 100644 --- a/pyatlan_v9/model/assets/quick_sight_dataset_field.py +++ b/pyatlan_v9/model/assets/quick_sight_dataset_field.py @@ -46,7 +46,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import RelatedQuickSightDataset, RelatedQuickSightDatasetField +from .quick_sight_related import RelatedQuickSightDataset from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -64,7 +64,7 @@ class QuickSightDatasetField(Asset): Instance of a QuickSight dataset field in Atlan. """ - QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[Any] = None + QUICK_SIGHT_TYPE: ClassVar[Any] = None QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None @@ -99,7 +99,9 @@ class QuickSightDatasetField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDatasetField" + + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -220,78 +222,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDatasetField instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dataset is UNSET: - errors.append("quick_sight_dataset is required for creation") - if self.quick_sight_dataset_qualified_name is UNSET: - errors.append( - "quick_sight_dataset_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightDatasetField validation failed: {errors}") - - def minimize(self) -> "QuickSightDatasetField": - """ - Return a minimal copy of this QuickSightDatasetField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDatasetField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDatasetField instance with only the minimum required fields. 
- """ - self.validate() - return QuickSightDatasetField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightDatasetField": - """ - Create a :class:`RelatedQuickSightDatasetField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDatasetField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightDatasetField(guid=self.guid) - return RelatedQuickSightDatasetField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -404,7 +334,7 @@ def from_json( class QuickSightDatasetFieldAttributes(AssetAttributes): """QuickSightDatasetField-specific attributes for nested API format.""" - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -577,7 +507,7 @@ def _populate_quick_sight_dataset_field_attrs( ) -> None: """Populate QuickSightDatasetField-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dataset_field_type = obj.quick_sight_dataset_field_type + attrs.quick_sight_type = obj.quick_sight_type attrs.quick_sight_dataset_qualified_name = obj.quick_sight_dataset_qualified_name attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id @@ -589,7 +519,7 @@ def _extract_quick_sight_dataset_field_attrs( ) -> dict: """Extract all QuickSightDatasetField attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_dataset_field_type"] = attrs.quick_sight_dataset_field_type + result["quick_sight_type"] = attrs.quick_sight_type result["quick_sight_dataset_qualified_name"] = ( 
attrs.quick_sight_dataset_qualified_name ) @@ -636,9 +566,6 @@ def _quick_sight_dataset_field_to_nested( is_incomplete=quick_sight_dataset_field.is_incomplete, provenance_type=quick_sight_dataset_field.provenance_type, home_id=quick_sight_dataset_field.home_id, - depth=quick_sight_dataset_field.depth, - immediate_upstream=quick_sight_dataset_field.immediate_upstream, - immediate_downstream=quick_sight_dataset_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +601,7 @@ def _quick_sight_dataset_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +610,6 @@ def _quick_sight_dataset_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dataset_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -715,8 +640,8 @@ def _quick_sight_dataset_field_from_nested_bytes( RelationField, ) -QuickSightDatasetField.QUICK_SIGHT_DATASET_FIELD_TYPE = KeywordField( - "quickSightDatasetFieldType", "quickSightDatasetFieldType" +QuickSightDatasetField.QUICK_SIGHT_TYPE = KeywordField( + "quickSightType", "quickSightType" ) QuickSightDatasetField.QUICK_SIGHT_DATASET_QUALIFIED_NAME = KeywordTextField( "quickSightDatasetQualifiedName", diff --git a/pyatlan_v9/model/assets/quick_sight_folder.py b/pyatlan_v9/model/assets/quick_sight_folder.py index ed8bcd6a9..582a37fa3 100644 --- a/pyatlan_v9/model/assets/quick_sight_folder.py +++ b/pyatlan_v9/model/assets/quick_sight_folder.py @@ -49,7 +49,6 @@ RelatedQuickSightAnalysis, RelatedQuickSightDashboard, 
RelatedQuickSightDataset, - RelatedQuickSightFolder, ) from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -68,7 +67,7 @@ class QuickSightFolder(Asset): Instance of a QuickSight folder in Atlan. """ - QUICK_SIGHT_FOLDER_TYPE: ClassVar[Any] = None + QUICK_SIGHT_TYPE: ClassVar[Any] = None QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None @@ -105,7 +104,9 @@ class QuickSightFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightFolder" + + quick_sight_type: Union[str, None, UnsetType] = UNSET """Type of this folder, for example: SHARED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET @@ -228,66 +229,6 @@ class QuickSightFolder(Asset): def __post_init__(self) -> None: self.type_name = "QuickSightFolder" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QuickSightFolder validation failed: {errors}") - - def minimize(self) -> "QuickSightFolder": - """ - Return a minimal copy of this QuickSightFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightFolder instance with only the minimum required fields. - """ - self.validate() - return QuickSightFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightFolder": - """ - Create a :class:`RelatedQuickSightFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightFolder(guid=self.guid) - return RelatedQuickSightFolder(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -385,7 +326,7 @@ def from_json( class QuickSightFolderAttributes(AssetAttributes): """QuickSightFolder-specific attributes for nested API format.""" - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Type of this folder, for example: SHARED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET @@ -570,7 +511,7 @@ def _populate_quick_sight_folder_attrs( ) -> None: """Populate QuickSightFolder-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_folder_type = obj.quick_sight_folder_type + attrs.quick_sight_type = obj.quick_sight_type attrs.quick_sight_folder_hierarchy = obj.quick_sight_folder_hierarchy attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id @@ -580,7 +521,7 @@ def _populate_quick_sight_folder_attrs( def _extract_quick_sight_folder_attrs(attrs: QuickSightFolderAttributes) -> dict: """Extract all QuickSightFolder attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_folder_type"] = attrs.quick_sight_folder_type + result["quick_sight_type"] = attrs.quick_sight_type result["quick_sight_folder_hierarchy"] = attrs.quick_sight_folder_hierarchy result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id @@ -625,9 +566,6 @@ def _quick_sight_folder_to_nested( is_incomplete=quick_sight_folder.is_incomplete, provenance_type=quick_sight_folder.provenance_type, home_id=quick_sight_folder.home_id, - depth=quick_sight_folder.depth, - immediate_upstream=quick_sight_folder.immediate_upstream, - immediate_downstream=quick_sight_folder.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -661,6 +599,7 @@ def _quick_sight_folder_from_nested(nested: QuickSightFolderNested) -> QuickSigh updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -669,9 +608,6 @@ def _quick_sight_folder_from_nested(nested: QuickSightFolderNested) -> QuickSigh is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -702,9 +638,7 @@ def _quick_sight_folder_from_nested_bytes( RelationField, ) -QuickSightFolder.QUICK_SIGHT_FOLDER_TYPE = KeywordField( - "quickSightFolderType", "quickSightFolderType" -) +QuickSightFolder.QUICK_SIGHT_TYPE = KeywordField("quickSightType", "quickSightType") QuickSightFolder.QUICK_SIGHT_FOLDER_HIERARCHY = KeywordField( "quickSightFolderHierarchy", "quickSightFolderHierarchy" ) diff --git a/pyatlan_v9/model/assets/quick_sight_related.py b/pyatlan_v9/model/assets/quick_sight_related.py index edf865e62..eee224863 100644 --- a/pyatlan_v9/model/assets/quick_sight_related.py +++ b/pyatlan_v9/model/assets/quick_sight_related.py @@ -82,10 +82,10 @@ class RelatedQuickSightDataset(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDataset" so it serializes correctly - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + 
quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" def __post_init__(self) -> None: @@ -103,7 +103,7 @@ class RelatedQuickSightDatasetField(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDatasetField" so it serializes correctly - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -124,7 +124,7 @@ class RelatedQuickSightFolder(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightFolder" so it serializes correctly - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Type of this folder, for example: SHARED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET @@ -145,7 +145,7 @@ class RelatedQuickSightAnalysis(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightAnalysis" so it serializes correctly - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -192,10 +192,10 @@ class RelatedQuickSightDashboard(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDashboard" so it serializes correctly - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - 
quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/readme.py b/pyatlan_v9/model/assets/readme.py index dc4c73653..7c5bc6d31 100644 --- a/pyatlan_v9/model/assets/readme.py +++ b/pyatlan_v9/model/assets/readme.py @@ -100,6 +100,8 @@ class Readme(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Readme" + link: Union[str, None, UnsetType] = UNSET """URL to the resource.""" @@ -219,72 +221,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Readme instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.asset is UNSET: - errors.append("asset is required for creation") - if errors: - raise ValueError(f"Readme validation failed: {errors}") - - def minimize(self) -> "Readme": - """ - Return a minimal copy of this Readme with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Readme with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Readme instance with only the minimum required fields. - """ - self.validate() - return Readme(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReadme": - """ - Create a :class:`RelatedReadme` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReadme reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedReadme(guid=self.guid) - return RelatedReadme(qualified_name=self.qualified_name) - @property def description(self) -> Union[str, None, UnsetType]: """Decode URL-encoded description content for parity with legacy models.""" @@ -627,9 +563,6 @@ def _readme_to_nested(readme: Readme) -> ReadmeNested: is_incomplete=readme.is_incomplete, provenance_type=readme.provenance_type, home_id=readme.home_id, - depth=readme.depth, - immediate_upstream=readme.immediate_upstream, - immediate_downstream=readme.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -659,6 +592,7 @@ def _readme_from_nested(nested: ReadmeNested) -> Readme: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -667,9 +601,6 @@ def _readme_from_nested(nested: ReadmeNested) -> Readme: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_readme_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/readme_template.py b/pyatlan_v9/model/assets/readme_template.py index 5082e348d..7f4987db7 100644 --- a/pyatlan_v9/model/assets/readme_template.py +++ b/pyatlan_v9/model/assets/readme_template.py @@ -45,12 +45,7 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable -from .resource_related import ( - RelatedFile, - RelatedLink, - RelatedReadme, - RelatedReadmeTemplate, -) +from .resource_related import RelatedFile, RelatedLink, RelatedReadme from 
.schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -101,6 +96,8 @@ class ReadmeTemplate(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ReadmeTemplate" + icon: Union[str, None, UnsetType] = UNSET """Icon to use for the README template.""" @@ -214,66 +211,6 @@ class ReadmeTemplate(Asset): def __post_init__(self) -> None: self.type_name = "ReadmeTemplate" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ReadmeTemplate instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ReadmeTemplate validation failed: {errors}") - - def minimize(self) -> "ReadmeTemplate": - """ - Return a minimal copy of this ReadmeTemplate with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ReadmeTemplate with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ReadmeTemplate instance with only the minimum required fields. - """ - self.validate() - return ReadmeTemplate(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReadmeTemplate": - """ - Create a :class:`RelatedReadmeTemplate` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReadmeTemplate reference to this asset. - """ - if self.guid is not UNSET: - return RelatedReadmeTemplate(guid=self.guid) - return RelatedReadmeTemplate(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -556,9 +493,6 @@ def _readme_template_to_nested(readme_template: ReadmeTemplate) -> ReadmeTemplat is_incomplete=readme_template.is_incomplete, provenance_type=readme_template.provenance_type, home_id=readme_template.home_id, - depth=readme_template.depth, - immediate_upstream=readme_template.immediate_upstream, - immediate_downstream=readme_template.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +526,7 @@ def _readme_template_from_nested(nested: ReadmeTemplateNested) -> ReadmeTemplate updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +535,6 @@ def 
_readme_template_from_nested(nested: ReadmeTemplateNested) -> ReadmeTemplate is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_readme_template_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/redash.py b/pyatlan_v9/model/assets/redash.py index e59efb589..93990039e 100644 --- a/pyatlan_v9/model/assets/redash.py +++ b/pyatlan_v9/model/assets/redash.py @@ -44,7 +44,6 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedash from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -92,6 +91,8 @@ class Redash(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Redash" + redash_is_published: Union[bool, None, UnsetType] = UNSET """Whether this asset is published in Redash (true) or not (false).""" @@ -190,66 +191,6 @@ class Redash(Asset): def __post_init__(self) -> None: self.type_name = "Redash" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Redash instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Redash validation failed: {errors}") - - def minimize(self) -> "Redash": - """ - Return a minimal copy of this Redash with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Redash with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Redash instance with only the minimum required fields. - """ - self.validate() - return Redash(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedash": - """ - Create a :class:`RelatedRedash` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedash reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedash(guid=self.guid) - return RelatedRedash(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -501,9 +442,6 @@ def _redash_to_nested(redash: Redash) -> RedashNested: is_incomplete=redash.is_incomplete, provenance_type=redash.provenance_type, home_id=redash.home_id, - depth=redash.depth, - immediate_upstream=redash.immediate_upstream, - immediate_downstream=redash.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -533,6 +471,7 @@ def _redash_from_nested(nested: RedashNested) -> Redash: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -541,9 +480,6 @@ def _redash_from_nested(nested: RedashNested) -> Redash: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/redash_dashboard.py b/pyatlan_v9/model/assets/redash_dashboard.py index 771fe796c..1f28fc421 100644 --- a/pyatlan_v9/model/assets/redash_dashboard.py +++ b/pyatlan_v9/model/assets/redash_dashboard.py @@ -44,7 +44,6 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedashDashboard from .referenceable_related import 
RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -93,6 +92,8 @@ class RedashDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashDashboard" + redash_dashboard_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" @@ -194,66 +195,6 @@ class RedashDashboard(Asset): def __post_init__(self) -> None: self.type_name = "RedashDashboard" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"RedashDashboard validation failed: {errors}") - - def minimize(self) -> "RedashDashboard": - """ - Return a minimal copy of this RedashDashboard with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashDashboard instance with only the minimum required fields. - """ - self.validate() - return RedashDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashDashboard": - """ - Create a :class:`RelatedRedashDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedRedashDashboard(guid=self.guid) - return RelatedRedashDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -520,9 +461,6 @@ def _redash_dashboard_to_nested( is_incomplete=redash_dashboard.is_incomplete, provenance_type=redash_dashboard.provenance_type, home_id=redash_dashboard.home_id, - depth=redash_dashboard.depth, - immediate_upstream=redash_dashboard.immediate_upstream, - immediate_downstream=redash_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -556,6 +494,7 @@ def _redash_dashboard_from_nested(nested: RedashDashboardNested) -> RedashDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -564,9 +503,6 @@ def _redash_dashboard_from_nested(nested: RedashDashboardNested) -> 
RedashDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/redash_query.py b/pyatlan_v9/model/assets/redash_query.py index 778527a73..8edd41061 100644 --- a/pyatlan_v9/model/assets/redash_query.py +++ b/pyatlan_v9/model/assets/redash_query.py @@ -45,7 +45,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedashQuery, RelatedRedashVisualization +from .redash_related import RelatedRedashVisualization from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -100,6 +100,8 @@ class RedashQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashQuery" + redash_query_sql: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="redashQuerySQL" ) @@ -223,66 +225,6 @@ class RedashQuery(Asset): def __post_init__(self) -> None: self.type_name = "RedashQuery" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"RedashQuery validation failed: {errors}") - - def minimize(self) -> "RedashQuery": - """ - Return a minimal copy of this RedashQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashQuery instance with only the minimum required fields. - """ - self.validate() - return RedashQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashQuery": - """ - Create a :class:`RelatedRedashQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashQuery reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedashQuery(guid=self.guid) - return RelatedRedashQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -576,9 +518,6 @@ def _redash_query_to_nested(redash_query: RedashQuery) -> RedashQueryNested: is_incomplete=redash_query.is_incomplete, provenance_type=redash_query.provenance_type, home_id=redash_query.home_id, - depth=redash_query.depth, - immediate_upstream=redash_query.immediate_upstream, - immediate_downstream=redash_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -610,6 +549,7 @@ def _redash_query_from_nested(nested: RedashQueryNested) -> RedashQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -618,9 +558,6 @@ def _redash_query_from_nested(nested: RedashQueryNested) -> RedashQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_query_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/redash_visualization.py b/pyatlan_v9/model/assets/redash_visualization.py index 8251decfc..b0bbde7b5 100644 --- a/pyatlan_v9/model/assets/redash_visualization.py +++ b/pyatlan_v9/model/assets/redash_visualization.py @@ -45,7 +45,7 @@ from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from 
.process_related import RelatedProcess -from .redash_related import RelatedRedashQuery, RelatedRedashVisualization +from .redash_related import RelatedRedashQuery from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -97,6 +97,8 @@ class RedashVisualization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashVisualization" + redash_visualization_type: Union[str, None, UnsetType] = UNSET """Type of this visualization.""" @@ -213,76 +215,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashVisualization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.redash_query is UNSET: - errors.append("redash_query is required for creation") - if self.redash_query_name is UNSET: - errors.append("redash_query_name is required for creation") - if self.redash_query_qualified_name is UNSET: - errors.append("redash_query_qualified_name is required for creation") - if errors: - raise ValueError(f"RedashVisualization validation failed: {errors}") - - def minimize(self) -> "RedashVisualization": - """ - Return a minimal copy of this RedashVisualization with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashVisualization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashVisualization instance with only the minimum required fields. - """ - self.validate() - return RedashVisualization(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashVisualization": - """ - Create a :class:`RelatedRedashVisualization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashVisualization reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedashVisualization(guid=self.guid) - return RelatedRedashVisualization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -563,9 +495,6 @@ def _redash_visualization_to_nested( is_incomplete=redash_visualization.is_incomplete, provenance_type=redash_visualization.provenance_type, home_id=redash_visualization.home_id, - depth=redash_visualization.depth, - immediate_upstream=redash_visualization.immediate_upstream, - immediate_downstream=redash_visualization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -601,6 +530,7 @@ def _redash_visualization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -609,9 +539,6 @@ def _redash_visualization_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_visualization_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/referenceable.py b/pyatlan_v9/model/assets/referenceable.py index 9949a9ed6..2cecd04fb 100644 --- a/pyatlan_v9/model/assets/referenceable.py +++ b/pyatlan_v9/model/assets/referenceable.py @@ -19,14 +19,7 @@ import msgspec from msgspec import UNSET, UnsetType -from pyatlan.model.fields.atlan_fields import ( - InternalKeywordField, - InternalKeywordTextField, - InternalNumericField, - KeywordField, - KeywordTextField, - 
TextField, -) +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan_v9.model.conversion_utils import ( categorize_relationships, merge_relationships, @@ -78,65 +71,19 @@ def __post_init__(self) -> None: if self.type_name is UNSET: self.type_name = "Referenceable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Referenceable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Referenceable validation failed: {errors}") - - def minimize(self) -> "Referenceable": - """ - Return a minimal copy of this Referenceable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Referenceable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Referenceable instance with only the minimum required fields. 
- """ - self.validate() - return Referenceable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReferenceable": - """ - Create a :class:`RelatedReferenceable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReferenceable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedReferenceable(guid=self.guid) - return RelatedReferenceable(qualified_name=self.qualified_name) + # DEFERRED: Referenceable.TYPE_NAME = InternalKeywordTextField("typeName", "__typeName.keyword", "__typeName", "__typeName") + # DEFERRED: Referenceable.GUID = InternalKeywordField("guid", "__guid", "__guid") + # DEFERRED: Referenceable.CREATED_BY = InternalKeywordField("createdBy", "__createdBy", "__createdBy") + # DEFERRED: Referenceable.UPDATED_BY = InternalKeywordField("updatedBy", "__modifiedBy", "__modifiedBy") + # DEFERRED: Referenceable.STATUS = InternalKeywordField("status", "__state", "__state") + # DEFERRED: Referenceable.ATLAN_TAGS = InternalKeywordTextField("classificationNames", "__traitNames", "__classificationsText", "__classificationNames") + # DEFERRED: Referenceable.PROPAGATED_ATLAN_TAGS = InternalKeywordTextField("classificationNames", "__propagatedTraitNames", "__classificationsText", "__propagatedClassificationNames") + # DEFERRED: Referenceable.ASSIGNED_TERMS = InternalKeywordTextField("meanings", "__meanings", "__meaningsText", "__meanings") + # DEFERRED: Referenceable.SUPER_TYPE_NAMES = InternalKeywordTextField("typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames") + # DEFERRED: Referenceable.CREATE_TIME = InternalNumericField("createTime", "__timestamp", "__timestamp") + # DEFERRED: Referenceable.UPDATE_TIME = InternalNumericField("updateTime", "__modificationTimestamp", "__modificationTimestamp") + # DEFERRED: 
Referenceable.QUALIFIED_NAME = KeywordTextField("qualifiedName", "qualifiedName", "qualifiedName.text") + # DEFERRED: Referenceable.CUSTOM_ATTRIBUTES = TextField("customAttributes", "customAttributes") # Entity-level field descriptor placeholders (assigned at module bottom) TYPE_NAME: ClassVar[Any] = None @@ -306,9 +253,6 @@ class ReferenceableNested( is_incomplete: Union[Any, UnsetType] = UNSET provenance_type: Union[Any, UnsetType] = UNSET home_id: Union[Any, UnsetType] = UNSET - depth: Union[Any, UnsetType] = UNSET - immediate_upstream: Union[Any, UnsetType] = UNSET - immediate_downstream: Union[Any, UnsetType] = UNSET attributes: Union[ReferenceableAttributes, UnsetType] = UNSET relationship_attributes: Union[ReferenceableRelationshipAttributes, UnsetType] = ( @@ -384,9 +328,6 @@ def _referenceable_to_nested(referenceable: Referenceable) -> ReferenceableNeste is_incomplete=referenceable.is_incomplete, provenance_type=referenceable.provenance_type, home_id=referenceable.home_id, - depth=referenceable.depth, - immediate_upstream=referenceable.immediate_upstream, - immediate_downstream=referenceable.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -420,6 +361,7 @@ def _referenceable_from_nested(nested: ReferenceableNested) -> Referenceable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -428,9 +370,6 @@ def _referenceable_from_nested(nested: ReferenceableNested) -> Referenceable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_referenceable_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -461,43 +400,3 @@ def _referenceable_from_nested_bytes(data: bytes, serde: Serde) -> Referenceable Referenceable.MEANINGS = RelationField("meanings") Referenceable.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") Referenceable.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") - -Referenceable.TYPE_NAME = InternalKeywordTextField( - "typeName", "__typeName.keyword", "__typeName", "__typeName" -) -Referenceable.GUID = InternalKeywordField("guid", "__guid", "__guid") -Referenceable.CREATED_BY = InternalKeywordField( - "createdBy", "__createdBy", "__createdBy" -) -Referenceable.UPDATED_BY = InternalKeywordField( - "updatedBy", "__modifiedBy", "__modifiedBy" -) -Referenceable.STATUS = InternalKeywordField("status", "__state", "__state") -Referenceable.ATLAN_TAGS = InternalKeywordTextField( - "classificationNames", - "__traitNames", - "__classificationsText", - "__classificationNames", -) -Referenceable.PROPAGATED_ATLAN_TAGS = InternalKeywordTextField( - "classificationNames", - "__propagatedTraitNames", - "__classificationsText", - "__propagatedClassificationNames", -) -Referenceable.ASSIGNED_TERMS = InternalKeywordTextField( - "meanings", "__meanings", "__meaningsText", "__meanings" -) -Referenceable.SUPER_TYPE_NAMES = InternalKeywordTextField( - "typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames" -) -Referenceable.CREATE_TIME = InternalNumericField( - "createTime", "__timestamp", "__timestamp" -) -Referenceable.UPDATE_TIME = InternalNumericField( - "updateTime", "__modificationTimestamp", "__modificationTimestamp" -) -Referenceable.QUALIFIED_NAME = KeywordTextField( - "qualifiedName", "qualifiedName", "qualifiedName.text" -) -Referenceable.CUSTOM_ATTRIBUTES = TextField("customAttributes", "customAttributes") diff --git a/pyatlan_v9/model/assets/resource.py b/pyatlan_v9/model/assets/resource.py index f397377b5..7771081ab 100644 --- a/pyatlan_v9/model/assets/resource.py +++ 
b/pyatlan_v9/model/assets/resource.py @@ -45,7 +45,7 @@ from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable -from .resource_related import RelatedFile, RelatedLink, RelatedReadme, RelatedResource +from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -94,6 +94,8 @@ class Resource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Resource" + link: Union[str, None, UnsetType] = UNSET """URL to the resource.""" @@ -201,66 +203,6 @@ class Resource(Asset): def __post_init__(self) -> None: self.type_name = "Resource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Resource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Resource validation failed: {errors}") - - def minimize(self) -> "Resource": - """ - Return a minimal copy of this Resource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Resource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Resource instance with only the minimum required fields. - """ - self.validate() - return Resource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedResource": - """ - Create a :class:`RelatedResource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedResource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedResource(guid=self.guid) - return RelatedResource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -527,9 +469,6 @@ def _resource_to_nested(resource: Resource) -> ResourceNested: is_incomplete=resource.is_incomplete, provenance_type=resource.provenance_type, home_id=resource.home_id, - depth=resource.depth, - immediate_upstream=resource.immediate_upstream, - immediate_downstream=resource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -561,6 +500,7 @@ def _resource_from_nested(nested: ResourceNested) -> Resource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -569,9 +509,6 @@ def _resource_from_nested(nested: ResourceNested) -> Resource: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_resource_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/s3.py b/pyatlan_v9/model/assets/s3.py index 208e031f5..d88a3f078 100644 --- a/pyatlan_v9/model/assets/s3.py +++ b/pyatlan_v9/model/assets/s3.py @@ -47,7 +47,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3 from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import 
RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -106,6 +105,8 @@ class S3(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3" + s3_etag: Union[str, None, UnsetType] = msgspec.field(default=UNSET, name="s3ETag") """Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata.""" @@ -243,66 +244,6 @@ class S3(Asset): def __post_init__(self) -> None: self.type_name = "S3" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3 instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"S3 validation failed: {errors}") - - def minimize(self) -> "S3": - """ - Return a minimal copy of this S3 with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3 with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3 instance with only the minimum required fields. - """ - self.validate() - return S3(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3": - """ - Create a :class:`RelatedS3` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3 reference to this asset. - """ - if self.guid is not UNSET: - return RelatedS3(guid=self.guid) - return RelatedS3(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -615,9 +556,6 @@ def _s3_to_nested(s3: S3) -> S3Nested: is_incomplete=s3.is_incomplete, provenance_type=s3.provenance_type, home_id=s3.home_id, - depth=s3.depth, - immediate_upstream=s3.immediate_upstream, - immediate_downstream=s3.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -647,6 +585,7 @@ def _s3_from_nested(nested: S3Nested) -> S3: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -655,9 +594,6 @@ def _s3_from_nested(nested: S3Nested) -> S3: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_s3_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/s3_bucket.py b/pyatlan_v9/model/assets/s3_bucket.py index b17cd37c1..3e3139c8c 100644 --- a/pyatlan_v9/model/assets/s3_bucket.py +++ b/pyatlan_v9/model/assets/s3_bucket.py @@ -48,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3Bucket, RelatedS3Object, RelatedS3Prefix +from .s3_related import RelatedS3Object, RelatedS3Prefix from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -111,6 +111,8 @@ class S3Bucket(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Bucket" + s3_object_count: Union[int, None, UnsetType] = UNSET """Number of objects within the bucket.""" @@ -260,66 +262,6 @@ class S3Bucket(Asset): def __post_init__(self) -> None: self.type_name = "S3Bucket" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Bucket instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"S3Bucket validation failed: {errors}") - - def minimize(self) -> "S3Bucket": - """ - Return a minimal copy of this S3Bucket with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Bucket with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3Bucket instance with only the minimum required fields. - """ - self.validate() - return S3Bucket(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Bucket": - """ - Create a :class:`RelatedS3Bucket` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Bucket reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3Bucket(guid=self.guid) - return RelatedS3Bucket(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -708,9 +650,6 @@ def _s3_bucket_to_nested(s3_bucket: S3Bucket) -> S3BucketNested: is_incomplete=s3_bucket.is_incomplete, provenance_type=s3_bucket.provenance_type, home_id=s3_bucket.home_id, - depth=s3_bucket.depth, - immediate_upstream=s3_bucket.immediate_upstream, - immediate_downstream=s3_bucket.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -742,6 +681,7 @@ def _s3_bucket_from_nested(nested: S3BucketNested) -> S3Bucket: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -750,9 +690,6 @@ def _s3_bucket_from_nested(nested: S3BucketNested) -> S3Bucket: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_bucket_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/s3_object.py b/pyatlan_v9/model/assets/s3_object.py index d63897441..5a8f7bec2 100644 --- a/pyatlan_v9/model/assets/s3_object.py +++ b/pyatlan_v9/model/assets/s3_object.py @@ -50,7 +50,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3Bucket, RelatedS3Object, RelatedS3Prefix +from .s3_related import RelatedS3Bucket, RelatedS3Prefix from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related 
import RelatedSparkJob @@ -123,6 +123,8 @@ class S3Object(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Object" + s3_object_last_modified_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this object was last updated, in milliseconds, or when it was created if it has never been modified.""" @@ -308,76 +310,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Object instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.bucket is UNSET: - errors.append("bucket is required for creation") - if self.s3_bucket_name is UNSET: - errors.append("s3_bucket_name is required for creation") - if self.s3_bucket_qualified_name is UNSET: - errors.append("s3_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"S3Object validation failed: {errors}") - - def minimize(self) -> "S3Object": - """ - Return a minimal copy of this S3Object with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Object with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3Object instance with only the minimum required fields. - """ - self.validate() - return S3Object(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Object": - """ - Create a :class:`RelatedS3Object` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Object reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3Object(guid=self.guid) - return RelatedS3Object(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -914,9 +846,6 @@ def _s3_object_to_nested(s3_object: S3Object) -> S3ObjectNested: is_incomplete=s3_object.is_incomplete, provenance_type=s3_object.provenance_type, home_id=s3_object.home_id, - depth=s3_object.depth, - immediate_upstream=s3_object.immediate_upstream, - immediate_downstream=s3_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -948,6 +877,7 @@ def _s3_object_from_nested(nested: S3ObjectNested) -> S3Object: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -956,9 +886,6 @@ def _s3_object_from_nested(nested: S3ObjectNested) -> S3Object: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/s3_prefix.py b/pyatlan_v9/model/assets/s3_prefix.py index 0e99dd02b..393b81a71 100644 --- a/pyatlan_v9/model/assets/s3_prefix.py +++ b/pyatlan_v9/model/assets/s3_prefix.py @@ -115,6 +115,8 @@ class S3Prefix(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Prefix" + s3_bucket_name: Union[str, None, UnsetType] = UNSET """Simple name of the bucket in which this prefix exists.""" @@ -282,76 +284,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, 
for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Prefix instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.s3_bucket is UNSET: - errors.append("s3_bucket is required for creation") - if self.s3_bucket_name is UNSET: - errors.append("s3_bucket_name is required for creation") - if self.s3_bucket_qualified_name is UNSET: - errors.append("s3_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"S3Prefix validation failed: {errors}") - - def minimize(self) -> "S3Prefix": - """ - Return a minimal copy of this S3Prefix with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Prefix with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new S3Prefix instance with only the minimum required fields. - """ - self.validate() - return S3Prefix(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Prefix": - """ - Create a :class:`RelatedS3Prefix` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Prefix reference to this asset. - """ - if self.guid is not UNSET: - return RelatedS3Prefix(guid=self.guid) - return RelatedS3Prefix(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -704,9 +636,6 @@ def _s3_prefix_to_nested(s3_prefix: S3Prefix) -> S3PrefixNested: is_incomplete=s3_prefix.is_incomplete, provenance_type=s3_prefix.provenance_type, home_id=s3_prefix.home_id, - depth=s3_prefix.depth, - immediate_upstream=s3_prefix.immediate_upstream, - immediate_downstream=s3_prefix.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -738,6 +667,7 @@ def _s3_prefix_from_nested(nested: S3PrefixNested) -> S3Prefix: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -746,9 +676,6 @@ def _s3_prefix_from_nested(nested: S3PrefixNested) -> S3Prefix: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_prefix_attrs(attrs), # Merged relationship attributes 
**merged_rels, diff --git a/pyatlan_v9/model/assets/saa_s.py b/pyatlan_v9/model/assets/saa_s.py index 66ba3fcf0..6df23b888 100644 --- a/pyatlan_v9/model/assets/saa_s.py +++ b/pyatlan_v9/model/assets/saa_s.py @@ -37,7 +37,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedSaaS from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .gtc_related import RelatedAtlasGlossaryTerm @@ -91,6 +90,8 @@ class SaaS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SaaS" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class SaaS(Asset): def __post_init__(self) -> None: self.type_name = "SaaS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SaaS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SaaS validation failed: {errors}") - - def minimize(self) -> "SaaS": - """ - Return a minimal copy of this SaaS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SaaS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SaaS instance with only the minimum required fields. - """ - self.validate() - return SaaS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSaaS": - """ - Create a :class:`RelatedSaaS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSaaS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSaaS(guid=self.guid) - return RelatedSaaS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -489,9 +430,6 @@ def _saa_s_to_nested(saa_s: SaaS) -> SaaSNested: is_incomplete=saa_s.is_incomplete, provenance_type=saa_s.provenance_type, home_id=saa_s.home_id, - depth=saa_s.depth, - immediate_upstream=saa_s.immediate_upstream, - immediate_downstream=saa_s.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -521,6 +459,7 @@ def _saa_s_from_nested(nested: SaaSNested) -> SaaS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -529,9 +468,6 @@ def _saa_s_from_nested(nested: SaaSNested) -> SaaS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_saa_s_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker.py b/pyatlan_v9/model/assets/sage_maker.py index b458ae42f..5092d4f31 100644 --- a/pyatlan_v9/model/assets/sage_maker.py +++ b/pyatlan_v9/model/assets/sage_maker.py @@ -47,7 +47,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMaker from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck 
from .spark_related import RelatedSparkJob @@ -110,6 +109,8 @@ class SageMaker(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMaker" + sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET """Primary S3 URI associated with this SageMaker asset.""" @@ -273,66 +274,6 @@ class SageMaker(Asset): def __post_init__(self) -> None: self.type_name = "SageMaker" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMaker instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMaker validation failed: {errors}") - - def minimize(self) -> "SageMaker": - """ - Return a minimal copy of this SageMaker with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMaker with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new SageMaker instance with only the minimum required fields. - """ - self.validate() - return SageMaker(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMaker": - """ - Create a :class:`RelatedSageMaker` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMaker reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMaker(guid=self.guid) - return RelatedSageMaker(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -693,9 +634,6 @@ def _sage_maker_to_nested(sage_maker: SageMaker) -> SageMakerNested: is_incomplete=sage_maker.is_incomplete, provenance_type=sage_maker.provenance_type, home_id=sage_maker.home_id, - depth=sage_maker.depth, - immediate_upstream=sage_maker.immediate_upstream, - immediate_downstream=sage_maker.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -727,6 +665,7 @@ def _sage_maker_from_nested(nested: SageMakerNested) -> SageMaker: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -735,9 +674,6 @@ def _sage_maker_from_nested(nested: SageMakerNested) -> SageMaker: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_attrs(attrs), # Merged 
relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_feature.py b/pyatlan_v9/model/assets/sage_maker_feature.py index 2f46f4367..5533ecaea 100644 --- a/pyatlan_v9/model/assets/sage_maker_feature.py +++ b/pyatlan_v9/model/assets/sage_maker_feature.py @@ -48,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerFeature, RelatedSageMakerFeatureGroup +from .sage_maker_related import RelatedSageMakerFeatureGroup from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -64,10 +64,10 @@ class SageMakerFeature(Asset): Instance of a SageMaker Feature in Atlan. Represents an individual feature within a Feature Group, including its data type and metadata. """ - SAGE_MAKER_FEATURE_GROUP_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_QUALIFIED_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_DATA_TYPE: ClassVar[Any] = None - SAGE_MAKER_FEATURE_IS_RECORD_IDENTIFIER: ClassVar[Any] = None + SAGE_MAKER_GROUP_NAME: ClassVar[Any] = None + SAGE_MAKER_GROUP_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_DATA_TYPE: ClassVar[Any] = None + SAGE_MAKER_IS_RECORD_IDENTIFIER: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -116,16 +116,18 @@ class SageMakerFeature(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerFeature" + + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] 
= UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + sage_maker_is_record_identifier: Union[bool, None, UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -302,72 +304,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerFeature instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_feature_group is UNSET: - errors.append("sage_maker_feature_group is required for creation") - if errors: - raise ValueError(f"SageMakerFeature validation failed: {errors}") - - def minimize(self) -> "SageMakerFeature": - """ - Return a minimal copy of this SageMakerFeature with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerFeature with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerFeature instance with only the minimum required fields. - """ - self.validate() - return SageMakerFeature(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerFeature": - """ - Create a :class:`RelatedSageMakerFeature` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerFeature reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerFeature(guid=self.guid) - return RelatedSageMakerFeature(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -425,16 +361,16 @@ def from_json( class SageMakerFeatureAttributes(AssetAttributes): """SageMakerFeature-specific attributes for nested API format.""" - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + sage_maker_is_record_identifier: Union[bool, None, UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -665,14 +601,10 @@ def _populate_sage_maker_feature_attrs( ) -> None: """Populate SageMakerFeature-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_feature_group_name = obj.sage_maker_feature_group_name - attrs.sage_maker_feature_group_qualified_name = ( - obj.sage_maker_feature_group_qualified_name - ) - attrs.sage_maker_feature_data_type = obj.sage_maker_feature_data_type - attrs.sage_maker_feature_is_record_identifier = ( - obj.sage_maker_feature_is_record_identifier - ) + attrs.sage_maker_group_name = 
obj.sage_maker_group_name + attrs.sage_maker_group_qualified_name = obj.sage_maker_group_qualified_name + attrs.sage_maker_data_type = obj.sage_maker_data_type + attrs.sage_maker_is_record_identifier = obj.sage_maker_is_record_identifier attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -700,14 +632,10 @@ def _populate_sage_maker_feature_attrs( def _extract_sage_maker_feature_attrs(attrs: SageMakerFeatureAttributes) -> dict: """Extract all SageMakerFeature attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_feature_group_name"] = attrs.sage_maker_feature_group_name - result["sage_maker_feature_group_qualified_name"] = ( - attrs.sage_maker_feature_group_qualified_name - ) - result["sage_maker_feature_data_type"] = attrs.sage_maker_feature_data_type - result["sage_maker_feature_is_record_identifier"] = ( - attrs.sage_maker_feature_is_record_identifier - ) + result["sage_maker_group_name"] = attrs.sage_maker_group_name + result["sage_maker_group_qualified_name"] = attrs.sage_maker_group_qualified_name + result["sage_maker_data_type"] = attrs.sage_maker_data_type + result["sage_maker_is_record_identifier"] = attrs.sage_maker_is_record_identifier result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -772,9 +700,6 @@ def _sage_maker_feature_to_nested( is_incomplete=sage_maker_feature.is_incomplete, provenance_type=sage_maker_feature.provenance_type, home_id=sage_maker_feature.home_id, - depth=sage_maker_feature.depth, - immediate_upstream=sage_maker_feature.immediate_upstream, - immediate_downstream=sage_maker_feature.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ 
-808,6 +733,7 @@ def _sage_maker_feature_from_nested(nested: SageMakerFeatureNested) -> SageMaker updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -816,9 +742,6 @@ def _sage_maker_feature_from_nested(nested: SageMakerFeatureNested) -> SageMaker is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_feature_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -850,17 +773,17 @@ def _sage_maker_feature_from_nested_bytes( RelationField, ) -SageMakerFeature.SAGE_MAKER_FEATURE_GROUP_NAME = KeywordField( - "sageMakerFeatureGroupName", "sageMakerFeatureGroupName" +SageMakerFeature.SAGE_MAKER_GROUP_NAME = KeywordField( + "sageMakerGroupName", "sageMakerGroupName" ) -SageMakerFeature.SAGE_MAKER_FEATURE_GROUP_QUALIFIED_NAME = KeywordField( - "sageMakerFeatureGroupQualifiedName", "sageMakerFeatureGroupQualifiedName" +SageMakerFeature.SAGE_MAKER_GROUP_QUALIFIED_NAME = KeywordField( + "sageMakerGroupQualifiedName", "sageMakerGroupQualifiedName" ) -SageMakerFeature.SAGE_MAKER_FEATURE_DATA_TYPE = KeywordField( - "sageMakerFeatureDataType", "sageMakerFeatureDataType" +SageMakerFeature.SAGE_MAKER_DATA_TYPE = KeywordField( + "sageMakerDataType", "sageMakerDataType" ) -SageMakerFeature.SAGE_MAKER_FEATURE_IS_RECORD_IDENTIFIER = BooleanField( - "sageMakerFeatureIsRecordIdentifier", "sageMakerFeatureIsRecordIdentifier" +SageMakerFeature.SAGE_MAKER_IS_RECORD_IDENTIFIER = BooleanField( + "sageMakerIsRecordIdentifier", "sageMakerIsRecordIdentifier" ) SageMakerFeature.SAGE_MAKER_S3_URI = KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerFeature.ETHICAL_AI_PRIVACY_CONFIG = 
KeywordField( diff --git a/pyatlan_v9/model/assets/sage_maker_feature_group.py b/pyatlan_v9/model/assets/sage_maker_feature_group.py index ea8acd818..aa0ef5555 100644 --- a/pyatlan_v9/model/assets/sage_maker_feature_group.py +++ b/pyatlan_v9/model/assets/sage_maker_feature_group.py @@ -47,7 +47,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerFeature, RelatedSageMakerFeatureGroup +from .sage_maker_related import RelatedSageMakerFeature from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -63,11 +63,11 @@ class SageMakerFeatureGroup(Asset): Instance of a SageMaker Feature Store Feature Group in Atlan. Represents a collection of related features that can be used for machine learning training and inference. """ - SAGE_MAKER_FEATURE_GROUP_STATUS: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_RECORD_ID_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_GLUE_DATABASE_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_GLUE_TABLE_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_FEATURE_COUNT: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None + SAGE_MAKER_RECORD_ID_NAME: ClassVar[Any] = None + SAGE_MAKER_GLUE_DATABASE_NAME: ClassVar[Any] = None + SAGE_MAKER_GLUE_TABLE_NAME: ClassVar[Any] = None + SAGE_MAKER_FEATURE_COUNT: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -116,19 +116,21 @@ class SageMakerFeatureGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerFeatureGroup" + + sage_maker_status: 
Union[str, None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -297,66 +299,6 @@ class SageMakerFeatureGroup(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerFeatureGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerFeatureGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerFeatureGroup validation failed: {errors}") - - def minimize(self) -> "SageMakerFeatureGroup": - """ - Return a minimal copy of this SageMakerFeatureGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerFeatureGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerFeatureGroup instance with only the minimum required fields. - """ - self.validate() - return SageMakerFeatureGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerFeatureGroup": - """ - Create a :class:`RelatedSageMakerFeatureGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerFeatureGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerFeatureGroup(guid=self.guid) - return RelatedSageMakerFeatureGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -414,19 +356,19 @@ def from_json( class SageMakerFeatureGroupAttributes(AssetAttributes): """SageMakerFeatureGroup-specific attributes for nested API format.""" - sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -655,19 +597,11 @@ def _populate_sage_maker_feature_group_attrs( ) -> None: """Populate SageMakerFeatureGroup-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_feature_group_status = obj.sage_maker_feature_group_status - attrs.sage_maker_feature_group_record_id_name = ( - obj.sage_maker_feature_group_record_id_name - ) - 
attrs.sage_maker_feature_group_glue_database_name = ( - obj.sage_maker_feature_group_glue_database_name - ) - attrs.sage_maker_feature_group_glue_table_name = ( - obj.sage_maker_feature_group_glue_table_name - ) - attrs.sage_maker_feature_group_feature_count = ( - obj.sage_maker_feature_group_feature_count - ) + attrs.sage_maker_status = obj.sage_maker_status + attrs.sage_maker_record_id_name = obj.sage_maker_record_id_name + attrs.sage_maker_glue_database_name = obj.sage_maker_glue_database_name + attrs.sage_maker_glue_table_name = obj.sage_maker_glue_table_name + attrs.sage_maker_feature_count = obj.sage_maker_feature_count attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -697,19 +631,11 @@ def _extract_sage_maker_feature_group_attrs( ) -> dict: """Extract all SageMakerFeatureGroup attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_feature_group_status"] = attrs.sage_maker_feature_group_status - result["sage_maker_feature_group_record_id_name"] = ( - attrs.sage_maker_feature_group_record_id_name - ) - result["sage_maker_feature_group_glue_database_name"] = ( - attrs.sage_maker_feature_group_glue_database_name - ) - result["sage_maker_feature_group_glue_table_name"] = ( - attrs.sage_maker_feature_group_glue_table_name - ) - result["sage_maker_feature_group_feature_count"] = ( - attrs.sage_maker_feature_group_feature_count - ) + result["sage_maker_status"] = attrs.sage_maker_status + result["sage_maker_record_id_name"] = attrs.sage_maker_record_id_name + result["sage_maker_glue_database_name"] = attrs.sage_maker_glue_database_name + result["sage_maker_glue_table_name"] = attrs.sage_maker_glue_table_name + result["sage_maker_feature_count"] = attrs.sage_maker_feature_count result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = 
attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -774,9 +700,6 @@ def _sage_maker_feature_group_to_nested( is_incomplete=sage_maker_feature_group.is_incomplete, provenance_type=sage_maker_feature_group.provenance_type, home_id=sage_maker_feature_group.home_id, - depth=sage_maker_feature_group.depth, - immediate_upstream=sage_maker_feature_group.immediate_upstream, - immediate_downstream=sage_maker_feature_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -812,6 +735,7 @@ def _sage_maker_feature_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -820,9 +744,6 @@ def _sage_maker_feature_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_feature_group_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -854,20 +775,20 @@ def _sage_maker_feature_group_from_nested_bytes( RelationField, ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_STATUS = KeywordField( - "sageMakerFeatureGroupStatus", "sageMakerFeatureGroupStatus" +SageMakerFeatureGroup.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", "sageMakerStatus" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_RECORD_ID_NAME = KeywordField( - "sageMakerFeatureGroupRecordIdName", "sageMakerFeatureGroupRecordIdName" +SageMakerFeatureGroup.SAGE_MAKER_RECORD_ID_NAME = KeywordField( + "sageMakerRecordIdName", "sageMakerRecordIdName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_GLUE_DATABASE_NAME = KeywordField( - 
"sageMakerFeatureGroupGlueDatabaseName", "sageMakerFeatureGroupGlueDatabaseName" +SageMakerFeatureGroup.SAGE_MAKER_GLUE_DATABASE_NAME = KeywordField( + "sageMakerGlueDatabaseName", "sageMakerGlueDatabaseName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_GLUE_TABLE_NAME = KeywordField( - "sageMakerFeatureGroupGlueTableName", "sageMakerFeatureGroupGlueTableName" +SageMakerFeatureGroup.SAGE_MAKER_GLUE_TABLE_NAME = KeywordField( + "sageMakerGlueTableName", "sageMakerGlueTableName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_FEATURE_COUNT = NumericField( - "sageMakerFeatureGroupFeatureCount", "sageMakerFeatureGroupFeatureCount" +SageMakerFeatureGroup.SAGE_MAKER_FEATURE_COUNT = NumericField( + "sageMakerFeatureCount", "sageMakerFeatureCount" ) SageMakerFeatureGroup.SAGE_MAKER_S3_URI = KeywordField( "sageMakerS3Uri", "sageMakerS3Uri" diff --git a/pyatlan_v9/model/assets/sage_maker_model.py b/pyatlan_v9/model/assets/sage_maker_model.py index 274767f14..be4aa7ad9 100644 --- a/pyatlan_v9/model/assets/sage_maker_model.py +++ b/pyatlan_v9/model/assets/sage_maker_model.py @@ -50,7 +50,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sage_maker_related import ( - RelatedSageMakerModel, RelatedSageMakerModelDeployment, RelatedSageMakerModelGroup, ) @@ -69,12 +68,12 @@ class SageMakerModel(Asset): Instance of a SageMaker ML Model in Atlan. Represents trained machine learning models that can be deployed for inference. 
""" - SAGE_MAKER_MODEL_CONTAINER_IMAGE: ClassVar[Any] = None - SAGE_MAKER_MODEL_EXECUTION_ROLE_ARN: ClassVar[Any] = None - SAGE_MAKER_MODEL_MODEL_GROUP_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_MODEL_GROUP_QUALIFIED_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_VERSION: ClassVar[Any] = None - SAGE_MAKER_MODEL_STATUS: ClassVar[Any] = None + SAGE_MAKER_CONTAINER_IMAGE: ClassVar[Any] = None + SAGE_MAKER_EXECUTION_ROLE_ARN: ClassVar[Any] = None + SAGE_MAKER_MODEL_GROUP_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_GROUP_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_VERSION: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -125,22 +124,24 @@ class SageMakerModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModel" + + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET 
+ sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -323,78 +324,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_model_group is UNSET: - errors.append("sage_maker_model_group is required for creation") - if self.sage_maker_model_group_name is UNSET: - errors.append("sage_maker_model_group_name is required for creation") - if self.sage_maker_model_group_qualified_name is UNSET: - errors.append( - "sage_maker_model_group_qualified_name is required for creation" - ) - if errors: - raise 
ValueError(f"SageMakerModel validation failed: {errors}") - - def minimize(self) -> "SageMakerModel": - """ - Return a minimal copy of this SageMakerModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModel instance with only the minimum required fields. - """ - self.validate() - return SageMakerModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerModel": - """ - Create a :class:`RelatedSageMakerModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerModel(guid=self.guid) - return RelatedSageMakerModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -450,22 +379,22 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SageMakerMo class SageMakerModelAttributes(AssetAttributes): """SageMakerModel-specific attributes for nested API format.""" - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + 
sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -704,14 +633,14 @@ def _populate_sage_maker_model_attrs( ) -> None: """Populate SageMakerModel-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_container_image = obj.sage_maker_model_container_image - attrs.sage_maker_model_execution_role_arn = obj.sage_maker_model_execution_role_arn - attrs.sage_maker_model_model_group_name = obj.sage_maker_model_model_group_name - attrs.sage_maker_model_model_group_qualified_name = ( - obj.sage_maker_model_model_group_qualified_name - ) - attrs.sage_maker_model_version = obj.sage_maker_model_version - attrs.sage_maker_model_status = obj.sage_maker_model_status + attrs.sage_maker_container_image = obj.sage_maker_container_image + attrs.sage_maker_execution_role_arn = obj.sage_maker_execution_role_arn + attrs.sage_maker_model_group_name = obj.sage_maker_model_group_name + attrs.sage_maker_model_group_qualified_name = ( + obj.sage_maker_model_group_qualified_name + ) + attrs.sage_maker_version = obj.sage_maker_version + attrs.sage_maker_status = obj.sage_maker_status attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -739,18 +668,14 @@ def 
_populate_sage_maker_model_attrs( def _extract_sage_maker_model_attrs(attrs: SageMakerModelAttributes) -> dict: """Extract all SageMakerModel attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_model_container_image"] = attrs.sage_maker_model_container_image - result["sage_maker_model_execution_role_arn"] = ( - attrs.sage_maker_model_execution_role_arn - ) - result["sage_maker_model_model_group_name"] = ( - attrs.sage_maker_model_model_group_name - ) - result["sage_maker_model_model_group_qualified_name"] = ( - attrs.sage_maker_model_model_group_qualified_name - ) - result["sage_maker_model_version"] = attrs.sage_maker_model_version - result["sage_maker_model_status"] = attrs.sage_maker_model_status + result["sage_maker_container_image"] = attrs.sage_maker_container_image + result["sage_maker_execution_role_arn"] = attrs.sage_maker_execution_role_arn + result["sage_maker_model_group_name"] = attrs.sage_maker_model_group_name + result["sage_maker_model_group_qualified_name"] = ( + attrs.sage_maker_model_group_qualified_name + ) + result["sage_maker_version"] = attrs.sage_maker_version + result["sage_maker_status"] = attrs.sage_maker_status result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -815,9 +740,6 @@ def _sage_maker_model_to_nested( is_incomplete=sage_maker_model.is_incomplete, provenance_type=sage_maker_model.provenance_type, home_id=sage_maker_model.home_id, - depth=sage_maker_model.depth, - immediate_upstream=sage_maker_model.immediate_upstream, - immediate_downstream=sage_maker_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -851,6 +773,7 @@ def _sage_maker_model_from_nested(nested: SageMakerModelNested) -> SageMakerMode updated_by=nested.updated_by, 
classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -859,9 +782,6 @@ def _sage_maker_model_from_nested(nested: SageMakerModelNested) -> SageMakerMode is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -890,24 +810,20 @@ def _sage_maker_model_from_nested_bytes(data: bytes, serde: Serde) -> SageMakerM RelationField, ) -SageMakerModel.SAGE_MAKER_MODEL_CONTAINER_IMAGE = KeywordField( - "sageMakerModelContainerImage", "sageMakerModelContainerImage" -) -SageMakerModel.SAGE_MAKER_MODEL_EXECUTION_ROLE_ARN = KeywordField( - "sageMakerModelExecutionRoleArn", "sageMakerModelExecutionRoleArn" -) -SageMakerModel.SAGE_MAKER_MODEL_MODEL_GROUP_NAME = KeywordField( - "sageMakerModelModelGroupName", "sageMakerModelModelGroupName" +SageMakerModel.SAGE_MAKER_CONTAINER_IMAGE = KeywordField( + "sageMakerContainerImage", "sageMakerContainerImage" ) -SageMakerModel.SAGE_MAKER_MODEL_MODEL_GROUP_QUALIFIED_NAME = KeywordField( - "sageMakerModelModelGroupQualifiedName", "sageMakerModelModelGroupQualifiedName" +SageMakerModel.SAGE_MAKER_EXECUTION_ROLE_ARN = KeywordField( + "sageMakerExecutionRoleArn", "sageMakerExecutionRoleArn" ) -SageMakerModel.SAGE_MAKER_MODEL_VERSION = KeywordField( - "sageMakerModelVersion", "sageMakerModelVersion" +SageMakerModel.SAGE_MAKER_MODEL_GROUP_NAME = KeywordField( + "sageMakerModelGroupName", "sageMakerModelGroupName" ) -SageMakerModel.SAGE_MAKER_MODEL_STATUS = KeywordField( - "sageMakerModelStatus", "sageMakerModelStatus" +SageMakerModel.SAGE_MAKER_MODEL_GROUP_QUALIFIED_NAME = KeywordField( + 
"sageMakerModelGroupQualifiedName", "sageMakerModelGroupQualifiedName" ) +SageMakerModel.SAGE_MAKER_VERSION = KeywordField("sageMakerVersion", "sageMakerVersion") +SageMakerModel.SAGE_MAKER_STATUS = KeywordField("sageMakerStatus", "sageMakerStatus") SageMakerModel.SAGE_MAKER_S3_URI = KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerModel.ETHICAL_AI_PRIVACY_CONFIG = KeywordField( "ethicalAIPrivacyConfig", "ethicalAIPrivacyConfig" diff --git a/pyatlan_v9/model/assets/sage_maker_model_deployment.py b/pyatlan_v9/model/assets/sage_maker_model_deployment.py index c2c9fa1ef..e2b1b53ed 100644 --- a/pyatlan_v9/model/assets/sage_maker_model_deployment.py +++ b/pyatlan_v9/model/assets/sage_maker_model_deployment.py @@ -48,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerModel, RelatedSageMakerModelDeployment +from .sage_maker_related import RelatedSageMakerModel from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -64,10 +64,10 @@ class SageMakerModelDeployment(Asset): Instance of a SageMaker Endpoint in Atlan. Represents deployed models that can serve real-time inference requests. 
""" - SAGE_MAKER_MODEL_DEPLOYMENT_STATUS: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_ENDPOINT_CONFIG_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None + SAGE_MAKER_ENDPOINT_CONFIG_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_QUALIFIED_NAME: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -116,20 +116,18 @@ class SageMakerModelDeployment(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModelDeployment" + + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -306,80 +304,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModelDeployment instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_model is UNSET: - errors.append("sage_maker_model is required for creation") - if self.sage_maker_model_name is UNSET: - errors.append("sage_maker_model_name is required for creation") - if self.sage_maker_model_qualified_name is UNSET: - errors.append( - "sage_maker_model_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SageMakerModelDeployment validation failed: {errors}") - - def minimize(self) -> "SageMakerModelDeployment": - """ - Return a minimal copy of this SageMakerModelDeployment with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModelDeployment with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModelDeployment instance with only the minimum required fields. 
- """ - self.validate() - return SageMakerModelDeployment( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerModelDeployment": - """ - Create a :class:`RelatedSageMakerModelDeployment` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerModelDeployment reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerModelDeployment(guid=self.guid) - return RelatedSageMakerModelDeployment(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -437,20 +361,16 @@ def from_json( class SageMakerModelDeploymentAttributes(AssetAttributes): """SageMakerModelDeployment-specific attributes for nested API format.""" - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -679,16 +599,10 @@ def 
_populate_sage_maker_model_deployment_attrs( ) -> None: """Populate SageMakerModelDeployment-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_deployment_status = obj.sage_maker_model_deployment_status - attrs.sage_maker_model_deployment_endpoint_config_name = ( - obj.sage_maker_model_deployment_endpoint_config_name - ) - attrs.sage_maker_model_deployment_model_name = ( - obj.sage_maker_model_deployment_model_name - ) - attrs.sage_maker_model_deployment_model_qualified_name = ( - obj.sage_maker_model_deployment_model_qualified_name - ) + attrs.sage_maker_status = obj.sage_maker_status + attrs.sage_maker_endpoint_config_name = obj.sage_maker_endpoint_config_name + attrs.sage_maker_model_name = obj.sage_maker_model_name + attrs.sage_maker_model_qualified_name = obj.sage_maker_model_qualified_name attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -718,18 +632,10 @@ def _extract_sage_maker_model_deployment_attrs( ) -> dict: """Extract all SageMakerModelDeployment attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_model_deployment_status"] = ( - attrs.sage_maker_model_deployment_status - ) - result["sage_maker_model_deployment_endpoint_config_name"] = ( - attrs.sage_maker_model_deployment_endpoint_config_name - ) - result["sage_maker_model_deployment_model_name"] = ( - attrs.sage_maker_model_deployment_model_name - ) - result["sage_maker_model_deployment_model_qualified_name"] = ( - attrs.sage_maker_model_deployment_model_qualified_name - ) + result["sage_maker_status"] = attrs.sage_maker_status + result["sage_maker_endpoint_config_name"] = attrs.sage_maker_endpoint_config_name + result["sage_maker_model_name"] = attrs.sage_maker_model_name + result["sage_maker_model_qualified_name"] = attrs.sage_maker_model_qualified_name 
result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -794,9 +700,6 @@ def _sage_maker_model_deployment_to_nested( is_incomplete=sage_maker_model_deployment.is_incomplete, provenance_type=sage_maker_model_deployment.provenance_type, home_id=sage_maker_model_deployment.home_id, - depth=sage_maker_model_deployment.depth, - immediate_upstream=sage_maker_model_deployment.immediate_upstream, - immediate_downstream=sage_maker_model_deployment.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -832,6 +735,7 @@ def _sage_maker_model_deployment_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -840,9 +744,6 @@ def _sage_maker_model_deployment_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_deployment_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -875,23 +776,17 @@ def _sage_maker_model_deployment_from_nested_bytes( RelationField, ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_STATUS = KeywordField( - "sageMakerModelDeploymentStatus", "sageMakerModelDeploymentStatus" +SageMakerModelDeployment.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", "sageMakerStatus" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_ENDPOINT_CONFIG_NAME = ( - KeywordField( - "sageMakerModelDeploymentEndpointConfigName", - "sageMakerModelDeploymentEndpointConfigName", - ) 
+SageMakerModelDeployment.SAGE_MAKER_ENDPOINT_CONFIG_NAME = KeywordField( + "sageMakerEndpointConfigName", "sageMakerEndpointConfigName" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_NAME = KeywordField( - "sageMakerModelDeploymentModelName", "sageMakerModelDeploymentModelName" +SageMakerModelDeployment.SAGE_MAKER_MODEL_NAME = KeywordField( + "sageMakerModelName", "sageMakerModelName" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_QUALIFIED_NAME = ( - KeywordField( - "sageMakerModelDeploymentModelQualifiedName", - "sageMakerModelDeploymentModelQualifiedName", - ) +SageMakerModelDeployment.SAGE_MAKER_MODEL_QUALIFIED_NAME = KeywordField( + "sageMakerModelQualifiedName", "sageMakerModelQualifiedName" ) SageMakerModelDeployment.SAGE_MAKER_S3_URI = KeywordField( "sageMakerS3Uri", "sageMakerS3Uri" diff --git a/pyatlan_v9/model/assets/sage_maker_model_group.py b/pyatlan_v9/model/assets/sage_maker_model_group.py index 56c1fa6be..0ee9d3c39 100644 --- a/pyatlan_v9/model/assets/sage_maker_model_group.py +++ b/pyatlan_v9/model/assets/sage_maker_model_group.py @@ -48,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerModel, RelatedSageMakerModelGroup +from .sage_maker_related import RelatedSageMakerModel from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -64,7 +64,7 @@ class SageMakerModelGroup(Asset): Instance of a SageMaker Model Package Group in Atlan. Represents a collection of versioned models that can be organized and managed together. 
""" - SAGE_MAKER_MODEL_GROUP_STATUS: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -118,7 +118,9 @@ class SageMakerModelGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModelGroup" + + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -304,66 +306,6 @@ class SageMakerModelGroup(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerModelGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModelGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerModelGroup validation failed: {errors}") - - def minimize(self) -> "SageMakerModelGroup": - """ - Return a minimal copy of this SageMakerModelGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModelGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModelGroup instance with only the minimum required fields. - """ - self.validate() - return SageMakerModelGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerModelGroup": - """ - Create a :class:`RelatedSageMakerModelGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerModelGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerModelGroup(guid=self.guid) - return RelatedSageMakerModelGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -421,7 +363,7 @@ def from_json( class SageMakerModelGroupAttributes(AssetAttributes): """SageMakerModelGroup-specific attributes for nested API format.""" - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -669,7 +611,7 @@ def _populate_sage_maker_model_group_attrs( ) -> None: """Populate SageMakerModelGroup-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_group_status = obj.sage_maker_model_group_status + attrs.sage_maker_status = obj.sage_maker_status attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -700,7 +642,7 @@ def _populate_sage_maker_model_group_attrs( def _extract_sage_maker_model_group_attrs(attrs: SageMakerModelGroupAttributes) -> dict: """Extract all SageMakerModelGroup attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_model_group_status"] = attrs.sage_maker_model_group_status + result["sage_maker_status"] = attrs.sage_maker_status result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -768,9 +710,6 @@ def _sage_maker_model_group_to_nested( is_incomplete=sage_maker_model_group.is_incomplete, 
provenance_type=sage_maker_model_group.provenance_type, home_id=sage_maker_model_group.home_id, - depth=sage_maker_model_group.depth, - immediate_upstream=sage_maker_model_group.immediate_upstream, - immediate_downstream=sage_maker_model_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -806,6 +745,7 @@ def _sage_maker_model_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -814,9 +754,6 @@ def _sage_maker_model_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_group_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -847,8 +784,8 @@ def _sage_maker_model_group_from_nested_bytes( RelationField, ) -SageMakerModelGroup.SAGE_MAKER_MODEL_GROUP_STATUS = KeywordField( - "sageMakerModelGroupStatus", "sageMakerModelGroupStatus" +SageMakerModelGroup.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", "sageMakerStatus" ) SageMakerModelGroup.SAGE_MAKER_S3_URI = KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerModelGroup.ETHICAL_AI_PRIVACY_CONFIG = KeywordField( diff --git a/pyatlan_v9/model/assets/sage_maker_related.py b/pyatlan_v9/model/assets/sage_maker_related.py index 07cd28170..2589e80dd 100644 --- a/pyatlan_v9/model/assets/sage_maker_related.py +++ b/pyatlan_v9/model/assets/sage_maker_related.py @@ -56,19 +56,19 @@ class RelatedSageMakerFeatureGroup(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerFeatureGroup" so it serializes correctly - 
sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" def __post_init__(self) -> None: @@ -86,16 +86,16 @@ class RelatedSageMakerFeature(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerFeature" so it serializes correctly - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + sage_maker_is_record_identifier: Union[bool, None, 
UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" def __post_init__(self) -> None: @@ -113,22 +113,22 @@ class RelatedSageMakerModel(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModel" so it serializes correctly - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" def __post_init__(self) -> None: @@ -146,7 +146,7 @@ class RelatedSageMakerModelGroup(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModelGroup" so it serializes correctly - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" def __post_init__(self) -> None: @@ -164,20 +164,16 @@ class RelatedSageMakerModelDeployment(RelatedSageMaker): # 
type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModelDeployment" so it serializes correctly - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio.py b/pyatlan_v9/model/assets/sage_maker_unified_studio.py index ebdfb2e50..fd5d4a29d 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio.py @@ -46,7 +46,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudio from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -97,6 +96,8 @@ class SageMakerUnifiedStudio(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudio" + smus_domain_name: Union[str, None, UnsetType] = UNSET """Name of the SageMaker Unified Studio domain.""" 
@@ -210,68 +211,6 @@ class SageMakerUnifiedStudio(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerUnifiedStudio validation failed: {errors}") - - def minimize(self) -> "SageMakerUnifiedStudio": - """ - Return a minimal copy of this SageMakerUnifiedStudio with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudio instance with only the minimum required fields. 
- """ - self.validate() - return SageMakerUnifiedStudio( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudio": - """ - Create a :class:`RelatedSageMakerUnifiedStudio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudio reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudio(guid=self.guid) - return RelatedSageMakerUnifiedStudio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -560,9 +499,6 @@ def _sage_maker_unified_studio_to_nested( is_incomplete=sage_maker_unified_studio.is_incomplete, provenance_type=sage_maker_unified_studio.provenance_type, home_id=sage_maker_unified_studio.home_id, - depth=sage_maker_unified_studio.depth, - immediate_upstream=sage_maker_unified_studio.immediate_upstream, - immediate_downstream=sage_maker_unified_studio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -598,6 +534,7 @@ def _sage_maker_unified_studio_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -606,9 +543,6 @@ def _sage_maker_unified_studio_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_sage_maker_unified_studio_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py index 9de8193ae..e805bbe7b 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py @@ -46,10 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioAsset, - RelatedSageMakerUnifiedStudioAssetSchema, -) +from .sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudioAssetSchema from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -106,6 +103,8 @@ class SageMakerUnifiedStudioAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioAsset" + smus_asset_summary: Union[str, None, UnsetType] = UNSET """Summary text for the asset in SageMaker Unified Studio.""" @@ -239,68 +238,6 @@ class SageMakerUnifiedStudioAsset(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudioAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerUnifiedStudioAsset validation failed: {errors}") - - def minimize(self) -> "SageMakerUnifiedStudioAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioAsset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -622,9 +559,6 @@ def _sage_maker_unified_studio_asset_to_nested( is_incomplete=sage_maker_unified_studio_asset.is_incomplete, provenance_type=sage_maker_unified_studio_asset.provenance_type, home_id=sage_maker_unified_studio_asset.home_id, - depth=sage_maker_unified_studio_asset.depth, - immediate_upstream=sage_maker_unified_studio_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -660,6 +594,7 @@ def _sage_maker_unified_studio_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -668,9 +603,6 @@ def _sage_maker_unified_studio_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_asset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py index ed65d34fd..bfe42816d 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py @@ -47,10 
+47,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioAsset, - RelatedSageMakerUnifiedStudioAssetSchema, -) +from .sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudioAsset from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -105,6 +102,8 @@ class SageMakerUnifiedStudioAssetSchema(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioAssetSchema" + smus_data_type: Union[str, None, UnsetType] = UNSET """Data type of the schema/column.""" @@ -236,78 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioAssetSchema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_asset is UNSET: - errors.append("smus_asset is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioAssetSchema validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioAssetSchema": - """ - Return a minimal copy of this SageMakerUnifiedStudioAssetSchema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioAssetSchema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioAssetSchema instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioAssetSchema( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioAssetSchema": - """ - Create a :class:`RelatedSageMakerUnifiedStudioAssetSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioAssetSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioAssetSchema(guid=self.guid) - return RelatedSageMakerUnifiedStudioAssetSchema( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -622,9 +549,6 @@ def _sage_maker_unified_studio_asset_schema_to_nested( is_incomplete=sage_maker_unified_studio_asset_schema.is_incomplete, provenance_type=sage_maker_unified_studio_asset_schema.provenance_type, home_id=sage_maker_unified_studio_asset_schema.home_id, - depth=sage_maker_unified_studio_asset_schema.depth, - immediate_upstream=sage_maker_unified_studio_asset_schema.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_asset_schema.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -660,6 +584,7 @@ def _sage_maker_unified_studio_asset_schema_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -668,9 +593,6 @@ def _sage_maker_unified_studio_asset_schema_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_asset_schema_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py index 1cd35f630..255d4034b 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py +++ 
b/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioProject, RelatedSageMakerUnifiedStudioPublishedAsset, RelatedSageMakerUnifiedStudioSubscribedAsset, ) @@ -107,6 +106,8 @@ class SageMakerUnifiedStudioProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioProject" + smus_project_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Unified Studio project.""" @@ -242,70 +243,6 @@ class SageMakerUnifiedStudioProject(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudioProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioProject validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioProject": - """ - Return a minimal copy of this SageMakerUnifiedStudioProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioProject instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioProject( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioProject": - """ - Create a :class:`RelatedSageMakerUnifiedStudioProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioProject(guid=self.guid) - return RelatedSageMakerUnifiedStudioProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -628,9 +565,6 @@ def _sage_maker_unified_studio_project_to_nested( is_incomplete=sage_maker_unified_studio_project.is_incomplete, provenance_type=sage_maker_unified_studio_project.provenance_type, home_id=sage_maker_unified_studio_project.home_id, - depth=sage_maker_unified_studio_project.depth, - immediate_upstream=sage_maker_unified_studio_project.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -666,6 +600,7 @@ def _sage_maker_unified_studio_project_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -674,9 +609,6 @@ def _sage_maker_unified_studio_project_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py index 1fdedbcb6..5cfb94025 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py +++ 
b/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py @@ -50,7 +50,6 @@ from .sage_maker_unified_studio_related import ( RelatedSageMakerUnifiedStudioAssetSchema, RelatedSageMakerUnifiedStudioProject, - RelatedSageMakerUnifiedStudioPublishedAsset, RelatedSageMakerUnifiedStudioSubscribedAsset, ) from .schema_registry_related import RelatedSchemaRegistrySubject @@ -112,6 +111,8 @@ class SageMakerUnifiedStudioPublishedAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioPublishedAsset" + smus_published_asset_subscriptions_count: Union[int, None, UnsetType] = UNSET """Number of subscriptions for the published asset.""" @@ -262,78 +263,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioPublishedAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_project is UNSET: - errors.append("smus_project is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioPublishedAsset validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioPublishedAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioPublishedAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioPublishedAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioPublishedAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioPublishedAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioPublishedAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioPublishedAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioPublishedAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioPublishedAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioPublishedAsset( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -679,9 +608,6 @@ def _sage_maker_unified_studio_published_asset_to_nested( is_incomplete=sage_maker_unified_studio_published_asset.is_incomplete, provenance_type=sage_maker_unified_studio_published_asset.provenance_type, home_id=sage_maker_unified_studio_published_asset.home_id, - depth=sage_maker_unified_studio_published_asset.depth, - immediate_upstream=sage_maker_unified_studio_published_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_published_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +643,7 @@ def _sage_maker_unified_studio_published_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +652,6 @@ def _sage_maker_unified_studio_published_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_published_asset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py index da7311314..a07782775 100644 --- 
a/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py @@ -51,7 +51,6 @@ RelatedSageMakerUnifiedStudioAssetSchema, RelatedSageMakerUnifiedStudioProject, RelatedSageMakerUnifiedStudioPublishedAsset, - RelatedSageMakerUnifiedStudioSubscribedAsset, ) from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck @@ -119,6 +118,8 @@ class SageMakerUnifiedStudioSubscribedAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioSubscribedAsset" + smus_subscribed_asset_project_name: Union[str, None, UnsetType] = UNSET """Name of the SageMaker Unified Studio project from which this asset is subscribed.""" @@ -290,78 +291,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioSubscribedAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_project is UNSET: - errors.append("smus_project is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioSubscribedAsset validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioSubscribedAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioSubscribedAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioSubscribedAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioSubscribedAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioSubscribedAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioSubscribedAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioSubscribedAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioSubscribedAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioSubscribedAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioSubscribedAsset( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -764,9 +693,6 @@ def _sage_maker_unified_studio_subscribed_asset_to_nested( is_incomplete=sage_maker_unified_studio_subscribed_asset.is_incomplete, provenance_type=sage_maker_unified_studio_subscribed_asset.provenance_type, home_id=sage_maker_unified_studio_subscribed_asset.home_id, - depth=sage_maker_unified_studio_subscribed_asset.depth, - immediate_upstream=sage_maker_unified_studio_subscribed_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_subscribed_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -802,6 +728,7 @@ def _sage_maker_unified_studio_subscribed_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -810,9 +737,6 @@ def _sage_maker_unified_studio_subscribed_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_subscribed_asset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce.py b/pyatlan_v9/model/assets/salesforce.py index 61c00c35f..db3dd8f95 100644 --- a/pyatlan_v9/model/assets/salesforce.py +++ 
b/pyatlan_v9/model/assets/salesforce.py @@ -46,7 +46,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import RelatedSalesforce from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -93,6 +92,8 @@ class Salesforce(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Salesforce" + organization_qualified_name: Union[str, None, UnsetType] = UNSET """Fully-qualified name of the organization in Salesforce.""" @@ -194,66 +195,6 @@ class Salesforce(Asset): def __post_init__(self) -> None: self.type_name = "Salesforce" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Salesforce instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Salesforce validation failed: {errors}") - - def minimize(self) -> "Salesforce": - """ - Return a minimal copy of this Salesforce with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Salesforce with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Salesforce instance with only the minimum required fields. - """ - self.validate() - return Salesforce(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforce": - """ - Create a :class:`RelatedSalesforce` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforce reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforce(guid=self.guid) - return RelatedSalesforce(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -510,9 +451,6 @@ def _salesforce_to_nested(salesforce: Salesforce) -> SalesforceNested: is_incomplete=salesforce.is_incomplete, provenance_type=salesforce.provenance_type, home_id=salesforce.home_id, - depth=salesforce.depth, - immediate_upstream=salesforce.immediate_upstream, - immediate_downstream=salesforce.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -544,6 +482,7 @@ def _salesforce_from_nested(nested: SalesforceNested) -> Salesforce: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -552,9 +491,6 @@ def _salesforce_from_nested(nested: SalesforceNested) -> Salesforce: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce_dashboard.py b/pyatlan_v9/model/assets/salesforce_dashboard.py index 6a48ecfcd..9071f5004 100644 --- a/pyatlan_v9/model/assets/salesforce_dashboard.py +++ b/pyatlan_v9/model/assets/salesforce_dashboard.py @@ -47,11 +47,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from 
.salesforce_related import ( - RelatedSalesforceDashboard, - RelatedSalesforceOrganization, - RelatedSalesforceReport, -) +from .salesforce_related import RelatedSalesforceOrganization, RelatedSalesforceReport from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -103,6 +99,8 @@ class SalesforceDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceDashboard" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the dashboard in Salesforce.""" @@ -225,74 +223,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceDashboard validation failed: {errors}") - - def minimize(self) -> "SalesforceDashboard": - """ - Return a minimal copy of this SalesforceDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceDashboard instance with only the minimum required fields. - """ - self.validate() - return SalesforceDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceDashboard": - """ - Create a :class:`RelatedSalesforceDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceDashboard(guid=self.guid) - return RelatedSalesforceDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -582,9 +512,6 @@ def _salesforce_dashboard_to_nested( is_incomplete=salesforce_dashboard.is_incomplete, provenance_type=salesforce_dashboard.provenance_type, home_id=salesforce_dashboard.home_id, - depth=salesforce_dashboard.depth, - immediate_upstream=salesforce_dashboard.immediate_upstream, - immediate_downstream=salesforce_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -620,6 +547,7 @@ def _salesforce_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -628,9 +556,6 @@ def _salesforce_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce_field.py b/pyatlan_v9/model/assets/salesforce_field.py index bb56ea052..5a2f66207 100644 --- a/pyatlan_v9/model/assets/salesforce_field.py +++ b/pyatlan_v9/model/assets/salesforce_field.py @@ -47,7 +47,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import 
RelatedSalesforceField, RelatedSalesforceObject +from .salesforce_related import RelatedSalesforceObject from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -112,6 +112,8 @@ class SalesforceField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceField" + data_type: Union[str, None, UnsetType] = UNSET """Data type of values in this field.""" @@ -275,76 +277,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.object is UNSET: - errors.append("object is required for creation") - if self.object_qualified_name is UNSET: - errors.append("object_qualified_name is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceField validation failed: {errors}") - - def minimize(self) -> "SalesforceField": - """ - Return a minimal copy of this SalesforceField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceField instance with only the minimum required fields. - """ - self.validate() - return SalesforceField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceField": - """ - Create a :class:`RelatedSalesforceField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceField(guid=self.guid) - return RelatedSalesforceField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -699,9 +631,6 @@ def _salesforce_field_to_nested( is_incomplete=salesforce_field.is_incomplete, provenance_type=salesforce_field.provenance_type, home_id=salesforce_field.home_id, - depth=salesforce_field.depth, - immediate_upstream=salesforce_field.immediate_upstream, - immediate_downstream=salesforce_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -735,6 +664,7 @@ def _salesforce_field_from_nested(nested: SalesforceFieldNested) -> SalesforceFi updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -743,9 +673,6 @@ def _salesforce_field_from_nested(nested: SalesforceFieldNested) -> SalesforceFi is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce_object.py b/pyatlan_v9/model/assets/salesforce_object.py index b8fa6831e..d1e83cfdb 100644 --- a/pyatlan_v9/model/assets/salesforce_object.py +++ b/pyatlan_v9/model/assets/salesforce_object.py @@ -47,11 +47,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, 
RelatedReadme -from .salesforce_related import ( - RelatedSalesforceField, - RelatedSalesforceObject, - RelatedSalesforceOrganization, -) +from .salesforce_related import RelatedSalesforceField, RelatedSalesforceOrganization from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -105,6 +101,8 @@ class SalesforceObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceObject" + is_custom: Union[bool, None, UnsetType] = UNSET """Whether this object is a custom object (true) or not (false).""" @@ -233,74 +231,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceObject validation failed: {errors}") - - def minimize(self) -> "SalesforceObject": - """ - Return a minimal copy of this SalesforceObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceObject instance with only the minimum required fields. - """ - self.validate() - return SalesforceObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceObject": - """ - Create a :class:`RelatedSalesforceObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceObject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceObject(guid=self.guid) - return RelatedSalesforceObject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -599,9 +529,6 @@ def _salesforce_object_to_nested( is_incomplete=salesforce_object.is_incomplete, provenance_type=salesforce_object.provenance_type, home_id=salesforce_object.home_id, - depth=salesforce_object.depth, - immediate_upstream=salesforce_object.immediate_upstream, - immediate_downstream=salesforce_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -635,6 +562,7 @@ def _salesforce_object_from_nested(nested: SalesforceObjectNested) -> Salesforce updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -643,9 +571,6 @@ def _salesforce_object_from_nested(nested: SalesforceObjectNested) -> Salesforce is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_object_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce_organization.py b/pyatlan_v9/model/assets/salesforce_organization.py index 75d99644c..6c5a20070 100644 --- a/pyatlan_v9/model/assets/salesforce_organization.py +++ b/pyatlan_v9/model/assets/salesforce_organization.py @@ -49,7 +49,6 @@ from .salesforce_related import ( RelatedSalesforceDashboard, RelatedSalesforceObject, - RelatedSalesforceOrganization, 
RelatedSalesforceReport, ) from .schema_registry_related import RelatedSchemaRegistrySubject @@ -102,6 +101,8 @@ class SalesforceOrganization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceOrganization" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the organization in Salesforce.""" @@ -215,68 +216,6 @@ class SalesforceOrganization(Asset): def __post_init__(self) -> None: self.type_name = "SalesforceOrganization" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceOrganization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SalesforceOrganization validation failed: {errors}") - - def minimize(self) -> "SalesforceOrganization": - """ - Return a minimal copy of this SalesforceOrganization with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceOrganization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceOrganization instance with only the minimum required fields. - """ - self.validate() - return SalesforceOrganization( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSalesforceOrganization": - """ - Create a :class:`RelatedSalesforceOrganization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceOrganization reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSalesforceOrganization(guid=self.guid) - return RelatedSalesforceOrganization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -562,9 +501,6 @@ def _salesforce_organization_to_nested( is_incomplete=salesforce_organization.is_incomplete, provenance_type=salesforce_organization.provenance_type, home_id=salesforce_organization.home_id, - depth=salesforce_organization.depth, - immediate_upstream=salesforce_organization.immediate_upstream, - immediate_downstream=salesforce_organization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -600,6 +536,7 @@ def _salesforce_organization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -608,9 +545,6 
@@ def _salesforce_organization_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_organization_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/salesforce_report.py b/pyatlan_v9/model/assets/salesforce_report.py index dffbb859c..95a20fd50 100644 --- a/pyatlan_v9/model/assets/salesforce_report.py +++ b/pyatlan_v9/model/assets/salesforce_report.py @@ -50,7 +50,6 @@ from .salesforce_related import ( RelatedSalesforceDashboard, RelatedSalesforceOrganization, - RelatedSalesforceReport, ) from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck @@ -103,6 +102,8 @@ class SalesforceReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceReport" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the report in Salesforce.""" @@ -225,74 +226,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceReport validation failed: {errors}") - - def minimize(self) -> "SalesforceReport": - """ - Return a minimal copy of this SalesforceReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceReport instance with only the minimum required fields. - """ - self.validate() - return SalesforceReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceReport": - """ - Create a :class:`RelatedSalesforceReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceReport(guid=self.guid) - return RelatedSalesforceReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -582,9 +515,6 @@ def _salesforce_report_to_nested( is_incomplete=salesforce_report.is_incomplete, provenance_type=salesforce_report.provenance_type, home_id=salesforce_report.home_id, - depth=salesforce_report.depth, - immediate_upstream=salesforce_report.immediate_upstream, - immediate_downstream=salesforce_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +548,7 @@ def _salesforce_report_from_nested(nested: SalesforceReportNested) -> Salesforce updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +557,6 @@ def _salesforce_report_from_nested(nested: SalesforceReportNested) -> Salesforce is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_report_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap.py b/pyatlan_v9/model/assets/sap.py index 224f2f931..74abeec6e 100644 --- a/pyatlan_v9/model/assets/sap.py +++ b/pyatlan_v9/model/assets/sap.py @@ -46,7 +46,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import RelatedSAP 
from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -98,6 +97,8 @@ class SAP(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SAP" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -214,66 +215,6 @@ class SAP(Asset): def __post_init__(self) -> None: self.type_name = "SAP" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SAP instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SAP validation failed: {errors}") - - def minimize(self) -> "SAP": - """ - Return a minimal copy of this SAP with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SAP with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SAP instance with only the minimum required fields. - """ - self.validate() - return SAP(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSAP": - """ - Create a :class:`RelatedSAP` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSAP reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSAP(guid=self.guid) - return RelatedSAP(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -551,9 +492,6 @@ def _sap_to_nested(sap: SAP) -> SAPNested: is_incomplete=sap.is_incomplete, provenance_type=sap.provenance_type, home_id=sap.home_id, - depth=sap.depth, - immediate_upstream=sap.immediate_upstream, - immediate_downstream=sap.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -583,6 +521,7 @@ def _sap_from_nested(nested: SAPNested) -> SAP: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -591,9 +530,6 @@ def _sap_from_nested(nested: SAPNested) -> SAP: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_sap_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap_erp_abap_program.py b/pyatlan_v9/model/assets/sap_erp_abap_program.py index 00b213451..f300e0e50 100644 --- a/pyatlan_v9/model/assets/sap_erp_abap_program.py +++ b/pyatlan_v9/model/assets/sap_erp_abap_program.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sap_related import ( - RelatedSapErpAbapProgram, RelatedSapErpComponent, RelatedSapErpFunctionModule, RelatedSapErpTransactionCode, @@ -107,6 +106,8 @@ class SapErpAbapProgram(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpAbapProgram" + sap_erp_abap_program_type: Union[str, None, UnsetType] = UNSET """Specifies the type of ABAP program in SAP ERP (e.g., Report, Module Pool, Function Group).""" @@ -239,66 +240,6 @@ class SapErpAbapProgram(Asset): def __post_init__(self) -> None: self.type_name = "SapErpAbapProgram" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpAbapProgram instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpAbapProgram validation failed: {errors}") - - def minimize(self) -> "SapErpAbapProgram": - """ - Return a minimal copy of this SapErpAbapProgram with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpAbapProgram with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpAbapProgram instance with only the minimum required fields. - """ - self.validate() - return SapErpAbapProgram(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpAbapProgram": - """ - Create a :class:`RelatedSapErpAbapProgram` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpAbapProgram reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpAbapProgram(guid=self.guid) - return RelatedSapErpAbapProgram(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -611,9 +552,6 @@ def _sap_erp_abap_program_to_nested( is_incomplete=sap_erp_abap_program.is_incomplete, provenance_type=sap_erp_abap_program.provenance_type, home_id=sap_erp_abap_program.home_id, - depth=sap_erp_abap_program.depth, - immediate_upstream=sap_erp_abap_program.immediate_upstream, - immediate_downstream=sap_erp_abap_program.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -649,6 +587,7 @@ def _sap_erp_abap_program_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -657,9 +596,6 @@ def _sap_erp_abap_program_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_abap_program_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap_erp_cds_view.py b/pyatlan_v9/model/assets/sap_erp_cds_view.py index 4f4fb635f..bfe0c871e 100644 --- a/pyatlan_v9/model/assets/sap_erp_cds_view.py +++ b/pyatlan_v9/model/assets/sap_erp_cds_view.py @@ -46,11 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - 
RelatedSapErpCdsView, - RelatedSapErpColumn, - RelatedSapErpComponent, -) +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -66,10 +62,9 @@ class SapErpCdsView(Asset): Instance of a SAP CDS View in Atlan. """ - SAP_ERP_CDS_VIEW_TECHNICAL_NAME: ClassVar[Any] = None - SAP_ERP_CDS_VIEW_SOURCE_NAME: ClassVar[Any] = None - SAP_ERP_CDS_VIEW_SOURCE_TYPE: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None + SAP_SOURCE_NAME: ClassVar[Any] = None + SAP_SOURCE_TYPE: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None SAP_COMPONENT_NAME: ClassVar[Any] = None @@ -107,18 +102,17 @@ class SapErpCdsView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET - """The technical database view name of the SAP ERP CDS View.""" + type_name: Union[str, UnsetType] = "SapErpCdsView" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_technical_name: Union[str, None, UnsetType] = UNSET + """Technical identifier for SAP data objects, used for integration and internal reference.""" + + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" - sap_technical_name: Union[str, None, UnsetType] = UNSET - """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_logical_name: Union[str, None, UnsetType] = UNSET """Logical, business-friendly identifier for SAP data objects, aligned with business terminology and concepts.""" @@ -238,66 +232,6 @@ class 
SapErpCdsView(Asset): def __post_init__(self) -> None: self.type_name = "SapErpCdsView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpCdsView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpCdsView validation failed: {errors}") - - def minimize(self) -> "SapErpCdsView": - """ - Return a minimal copy of this SapErpCdsView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpCdsView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpCdsView instance with only the minimum required fields. - """ - self.validate() - return SapErpCdsView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpCdsView": - """ - Create a :class:`RelatedSapErpCdsView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpCdsView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSapErpCdsView(guid=self.guid) - return RelatedSapErpCdsView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -353,18 +287,15 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpCdsVi class SapErpCdsViewAttributes(AssetAttributes): """SapErpCdsView-specific attributes for nested API format.""" - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET - """The technical database view name of the SAP ERP CDS View.""" + sap_technical_name: Union[str, None, UnsetType] = UNSET + """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" - sap_technical_name: Union[str, None, UnsetType] = UNSET - """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_logical_name: Union[str, None, UnsetType] = UNSET """Logical, business-friendly identifier for SAP data objects, aligned with business terminology and concepts.""" @@ -545,10 +476,9 @@ def _populate_sap_erp_cds_view_attrs( ) -> None: """Populate SapErpCdsView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_cds_view_technical_name = obj.sap_erp_cds_view_technical_name - attrs.sap_erp_cds_view_source_name = 
obj.sap_erp_cds_view_source_name - attrs.sap_erp_cds_view_source_type = obj.sap_erp_cds_view_source_type attrs.sap_technical_name = obj.sap_technical_name + attrs.sap_source_name = obj.sap_source_name + attrs.sap_source_type = obj.sap_source_type attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name attrs.sap_component_name = obj.sap_component_name @@ -560,10 +490,9 @@ def _populate_sap_erp_cds_view_attrs( def _extract_sap_erp_cds_view_attrs(attrs: SapErpCdsViewAttributes) -> dict: """Extract all SapErpCdsView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_cds_view_technical_name"] = attrs.sap_erp_cds_view_technical_name - result["sap_erp_cds_view_source_name"] = attrs.sap_erp_cds_view_source_name - result["sap_erp_cds_view_source_type"] = attrs.sap_erp_cds_view_source_type result["sap_technical_name"] = attrs.sap_technical_name + result["sap_source_name"] = attrs.sap_source_name + result["sap_source_type"] = attrs.sap_source_type result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name result["sap_component_name"] = attrs.sap_component_name @@ -608,9 +537,6 @@ def _sap_erp_cds_view_to_nested(sap_erp_cds_view: SapErpCdsView) -> SapErpCdsVie is_incomplete=sap_erp_cds_view.is_incomplete, provenance_type=sap_erp_cds_view.provenance_type, home_id=sap_erp_cds_view.home_id, - depth=sap_erp_cds_view.depth, - immediate_upstream=sap_erp_cds_view.immediate_upstream, - immediate_downstream=sap_erp_cds_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -644,6 +570,7 @@ def _sap_erp_cds_view_from_nested(nested: SapErpCdsViewNested) -> SapErpCdsView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -652,9 +579,6 @@ def _sap_erp_cds_view_from_nested(nested: SapErpCdsViewNested) -> SapErpCdsView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_cds_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -683,16 +607,9 @@ def _sap_erp_cds_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpCdsV RelationField, ) -SapErpCdsView.SAP_ERP_CDS_VIEW_TECHNICAL_NAME = KeywordField( - "sapErpCdsViewTechnicalName", "sapErpCdsViewTechnicalName" -) -SapErpCdsView.SAP_ERP_CDS_VIEW_SOURCE_NAME = KeywordField( - "sapErpCdsViewSourceName", "sapErpCdsViewSourceName" -) -SapErpCdsView.SAP_ERP_CDS_VIEW_SOURCE_TYPE = KeywordField( - "sapErpCdsViewSourceType", "sapErpCdsViewSourceType" -) SapErpCdsView.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") +SapErpCdsView.SAP_SOURCE_NAME = KeywordField("sapSourceName", "sapSourceName") +SapErpCdsView.SAP_SOURCE_TYPE = KeywordField("sapSourceType", "sapSourceType") SapErpCdsView.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") SapErpCdsView.SAP_PACKAGE_NAME = KeywordField("sapPackageName", "sapPackageName") SapErpCdsView.SAP_COMPONENT_NAME = KeywordField("sapComponentName", "sapComponentName") diff --git a/pyatlan_v9/model/assets/sap_erp_column.py b/pyatlan_v9/model/assets/sap_erp_column.py index 61c3adb39..3b0390a3f 100644 --- a/pyatlan_v9/model/assets/sap_erp_column.py +++ b/pyatlan_v9/model/assets/sap_erp_column.py @@ -54,12 +54,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpCdsView, - RelatedSapErpColumn, - 
RelatedSapErpTable, - RelatedSapErpView, -) +from .sap_related import RelatedSapErpCdsView, RelatedSapErpTable, RelatedSapErpView from .schema_registry_related import RelatedSchemaRegistrySubject from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck @@ -76,21 +71,21 @@ class SapErpColumn(Asset): Instance of a SAP Column in Atlan. """ - SAP_ERP_COLUMN_DATA_ELEMENT: ClassVar[Any] = None - SAP_ERP_COLUMN_LOGICAL_DATA_TYPE: ClassVar[Any] = None - SAP_ERP_COLUMN_LENGTH: ClassVar[Any] = None - SAP_ERP_COLUMN_DECIMALS: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_PRIMARY: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_FOREIGN: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_MANDATORY: ClassVar[Any] = None + SAP_DATA_ELEMENT: ClassVar[Any] = None + SAP_LOGICAL_DATA_TYPE: ClassVar[Any] = None + SAP_LENGTH: ClassVar[Any] = None + SAP_DECIMALS: ClassVar[Any] = None + SAP_IS_PRIMARY: ClassVar[Any] = None + SAP_IS_FOREIGN: ClassVar[Any] = None + SAP_IS_MANDATORY: ClassVar[Any] = None SAP_ERP_TABLE_NAME: ClassVar[Any] = None SAP_ERP_TABLE_QUALIFIED_NAME: ClassVar[Any] = None SAP_ERP_VIEW_NAME: ClassVar[Any] = None SAP_ERP_VIEW_QUALIFIED_NAME: ClassVar[Any] = None SAP_ERP_CDS_VIEW_NAME: ClassVar[Any] = None SAP_ERP_CDS_VIEW_QUALIFIED_NAME: ClassVar[Any] = None - SAP_ERP_COLUMN_CHECK_TABLE_NAME: ClassVar[Any] = None - SAP_ERP_COLUMN_CHECK_TABLE_QUALIFIED_NAME: ClassVar[Any] = None + SAP_CHECK_TABLE_NAME: ClassVar[Any] = None + SAP_CHECK_TABLE_QUALIFIED_NAME: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None @@ -155,25 +150,27 @@ class SapErpColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpColumn" + + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, 
providing semantic information about the column.""" - sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: Union[str, None, UnsetType] = UNSET @@ -194,10 +191,10 @@ class SapErpColumn(Asset): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this 
column.""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -412,76 +409,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sap_erp_table is UNSET: - errors.append("sap_erp_table is required for creation") - if self.sap_erp_table_name is UNSET: - errors.append("sap_erp_table_name is required for creation") - if self.sap_erp_table_qualified_name is UNSET: - errors.append("sap_erp_table_qualified_name is required for creation") - if errors: - raise ValueError(f"SapErpColumn validation failed: {errors}") - - def minimize(self) -> "SapErpColumn": - """ - Return a minimal copy of this SapErpColumn with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpColumn instance with only the minimum required fields. - """ - self.validate() - return SapErpColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpColumn": - """ - Create a :class:`RelatedSapErpColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSapErpColumn(guid=self.guid) - return RelatedSapErpColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -537,25 +464,25 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpColum class SapErpColumnAttributes(AssetAttributes): """SapErpColumn-specific attributes for nested API format.""" - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, providing semantic information about the column.""" - sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: 
Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: Union[str, None, UnsetType] = UNSET @@ -576,10 +503,10 @@ class SapErpColumnAttributes(AssetAttributes): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this column.""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -857,23 +784,21 @@ def _populate_sap_erp_column_attrs( ) -> None: """Populate SapErpColumn-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_column_data_element = obj.sap_erp_column_data_element - attrs.sap_erp_column_logical_data_type = obj.sap_erp_column_logical_data_type - attrs.sap_erp_column_length = obj.sap_erp_column_length - attrs.sap_erp_column_decimals = obj.sap_erp_column_decimals - 
attrs.sap_erp_column_is_primary = obj.sap_erp_column_is_primary - attrs.sap_erp_column_is_foreign = obj.sap_erp_column_is_foreign - attrs.sap_erp_column_is_mandatory = obj.sap_erp_column_is_mandatory + attrs.sap_data_element = obj.sap_data_element + attrs.sap_logical_data_type = obj.sap_logical_data_type + attrs.sap_length = obj.sap_length + attrs.sap_decimals = obj.sap_decimals + attrs.sap_is_primary = obj.sap_is_primary + attrs.sap_is_foreign = obj.sap_is_foreign + attrs.sap_is_mandatory = obj.sap_is_mandatory attrs.sap_erp_table_name = obj.sap_erp_table_name attrs.sap_erp_table_qualified_name = obj.sap_erp_table_qualified_name attrs.sap_erp_view_name = obj.sap_erp_view_name attrs.sap_erp_view_qualified_name = obj.sap_erp_view_qualified_name attrs.sap_erp_cds_view_name = obj.sap_erp_cds_view_name attrs.sap_erp_cds_view_qualified_name = obj.sap_erp_cds_view_qualified_name - attrs.sap_erp_column_check_table_name = obj.sap_erp_column_check_table_name - attrs.sap_erp_column_check_table_qualified_name = ( - obj.sap_erp_column_check_table_qualified_name - ) + attrs.sap_check_table_name = obj.sap_check_table_name + attrs.sap_check_table_qualified_name = obj.sap_check_table_qualified_name attrs.sap_technical_name = obj.sap_technical_name attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name @@ -904,23 +829,21 @@ def _populate_sap_erp_column_attrs( def _extract_sap_erp_column_attrs(attrs: SapErpColumnAttributes) -> dict: """Extract all SapErpColumn attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_column_data_element"] = attrs.sap_erp_column_data_element - result["sap_erp_column_logical_data_type"] = attrs.sap_erp_column_logical_data_type - result["sap_erp_column_length"] = attrs.sap_erp_column_length - result["sap_erp_column_decimals"] = attrs.sap_erp_column_decimals - result["sap_erp_column_is_primary"] = attrs.sap_erp_column_is_primary - result["sap_erp_column_is_foreign"] 
= attrs.sap_erp_column_is_foreign - result["sap_erp_column_is_mandatory"] = attrs.sap_erp_column_is_mandatory + result["sap_data_element"] = attrs.sap_data_element + result["sap_logical_data_type"] = attrs.sap_logical_data_type + result["sap_length"] = attrs.sap_length + result["sap_decimals"] = attrs.sap_decimals + result["sap_is_primary"] = attrs.sap_is_primary + result["sap_is_foreign"] = attrs.sap_is_foreign + result["sap_is_mandatory"] = attrs.sap_is_mandatory result["sap_erp_table_name"] = attrs.sap_erp_table_name result["sap_erp_table_qualified_name"] = attrs.sap_erp_table_qualified_name result["sap_erp_view_name"] = attrs.sap_erp_view_name result["sap_erp_view_qualified_name"] = attrs.sap_erp_view_qualified_name result["sap_erp_cds_view_name"] = attrs.sap_erp_cds_view_name result["sap_erp_cds_view_qualified_name"] = attrs.sap_erp_cds_view_qualified_name - result["sap_erp_column_check_table_name"] = attrs.sap_erp_column_check_table_name - result["sap_erp_column_check_table_qualified_name"] = ( - attrs.sap_erp_column_check_table_qualified_name - ) + result["sap_check_table_name"] = attrs.sap_check_table_name + result["sap_check_table_qualified_name"] = attrs.sap_check_table_qualified_name result["sap_technical_name"] = attrs.sap_technical_name result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name @@ -984,9 +907,6 @@ def _sap_erp_column_to_nested(sap_erp_column: SapErpColumn) -> SapErpColumnNeste is_incomplete=sap_erp_column.is_incomplete, provenance_type=sap_erp_column.provenance_type, home_id=sap_erp_column.home_id, - depth=sap_erp_column.depth, - immediate_upstream=sap_erp_column.immediate_upstream, - immediate_downstream=sap_erp_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1020,6 +940,7 @@ def _sap_erp_column_from_nested(nested: SapErpColumnNested) -> SapErpColumn: updated_by=nested.updated_by, 
classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1028,9 +949,6 @@ def _sap_erp_column_from_nested(nested: SapErpColumnNested) -> SapErpColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1061,27 +979,15 @@ def _sap_erp_column_from_nested_bytes(data: bytes, serde: Serde) -> SapErpColumn RelationField, ) -SapErpColumn.SAP_ERP_COLUMN_DATA_ELEMENT = KeywordField( - "sapErpColumnDataElement", "sapErpColumnDataElement" -) -SapErpColumn.SAP_ERP_COLUMN_LOGICAL_DATA_TYPE = KeywordField( - "sapErpColumnLogicalDataType", "sapErpColumnLogicalDataType" -) -SapErpColumn.SAP_ERP_COLUMN_LENGTH = KeywordField( - "sapErpColumnLength", "sapErpColumnLength" -) -SapErpColumn.SAP_ERP_COLUMN_DECIMALS = KeywordField( - "sapErpColumnDecimals", "sapErpColumnDecimals" -) -SapErpColumn.SAP_ERP_COLUMN_IS_PRIMARY = BooleanField( - "sapErpColumnIsPrimary", "sapErpColumnIsPrimary" -) -SapErpColumn.SAP_ERP_COLUMN_IS_FOREIGN = BooleanField( - "sapErpColumnIsForeign", "sapErpColumnIsForeign" -) -SapErpColumn.SAP_ERP_COLUMN_IS_MANDATORY = BooleanField( - "sapErpColumnIsMandatory", "sapErpColumnIsMandatory" +SapErpColumn.SAP_DATA_ELEMENT = KeywordField("sapDataElement", "sapDataElement") +SapErpColumn.SAP_LOGICAL_DATA_TYPE = KeywordField( + "sapLogicalDataType", "sapLogicalDataType" ) +SapErpColumn.SAP_LENGTH = KeywordField("sapLength", "sapLength") +SapErpColumn.SAP_DECIMALS = KeywordField("sapDecimals", "sapDecimals") +SapErpColumn.SAP_IS_PRIMARY = BooleanField("sapIsPrimary", "sapIsPrimary") +SapErpColumn.SAP_IS_FOREIGN = BooleanField("sapIsForeign", 
"sapIsForeign") +SapErpColumn.SAP_IS_MANDATORY = BooleanField("sapIsMandatory", "sapIsMandatory") SapErpColumn.SAP_ERP_TABLE_NAME = KeywordField("sapErpTableName", "sapErpTableName") SapErpColumn.SAP_ERP_TABLE_QUALIFIED_NAME = KeywordTextField( "sapErpTableQualifiedName", @@ -1100,11 +1006,11 @@ def _sap_erp_column_from_nested_bytes(data: bytes, serde: Serde) -> SapErpColumn "sapErpCdsViewQualifiedName", "sapErpCdsViewQualifiedName.text", ) -SapErpColumn.SAP_ERP_COLUMN_CHECK_TABLE_NAME = KeywordField( - "sapErpColumnCheckTableName", "sapErpColumnCheckTableName" +SapErpColumn.SAP_CHECK_TABLE_NAME = KeywordField( + "sapCheckTableName", "sapCheckTableName" ) -SapErpColumn.SAP_ERP_COLUMN_CHECK_TABLE_QUALIFIED_NAME = KeywordField( - "sapErpColumnCheckTableQualifiedName", "sapErpColumnCheckTableQualifiedName" +SapErpColumn.SAP_CHECK_TABLE_QUALIFIED_NAME = KeywordField( + "sapCheckTableQualifiedName", "sapCheckTableQualifiedName" ) SapErpColumn.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") SapErpColumn.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") diff --git a/pyatlan_v9/model/assets/sap_erp_component.py b/pyatlan_v9/model/assets/sap_erp_component.py index ca9f5db82..c16f50fae 100644 --- a/pyatlan_v9/model/assets/sap_erp_component.py +++ b/pyatlan_v9/model/assets/sap_erp_component.py @@ -114,6 +114,8 @@ class SapErpComponent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpComponent" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -260,66 +262,6 @@ class SapErpComponent(Asset): def __post_init__(self) -> None: self.type_name = "SapErpComponent" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def 
validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpComponent instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpComponent validation failed: {errors}") - - def minimize(self) -> "SapErpComponent": - """ - Return a minimal copy of this SapErpComponent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpComponent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpComponent instance with only the minimum required fields. - """ - self.validate() - return SapErpComponent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpComponent": - """ - Create a :class:`RelatedSapErpComponent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpComponent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpComponent(guid=self.guid) - return RelatedSapErpComponent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -649,9 +591,6 @@ def _sap_erp_component_to_nested( is_incomplete=sap_erp_component.is_incomplete, provenance_type=sap_erp_component.provenance_type, home_id=sap_erp_component.home_id, - depth=sap_erp_component.depth, - immediate_upstream=sap_erp_component.immediate_upstream, - immediate_downstream=sap_erp_component.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -685,6 +624,7 @@ def _sap_erp_component_from_nested(nested: SapErpComponentNested) -> SapErpCompo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -693,9 +633,6 @@ def _sap_erp_component_from_nested(nested: SapErpComponentNested) -> SapErpCompo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_component_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap_erp_function_module.py b/pyatlan_v9/model/assets/sap_erp_function_module.py index d9413fefa..249c7020f 100644 --- a/pyatlan_v9/model/assets/sap_erp_function_module.py +++ b/pyatlan_v9/model/assets/sap_erp_function_module.py @@ -46,11 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import 
RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpAbapProgram, - RelatedSapErpComponent, - RelatedSapErpFunctionModule, -) +from .sap_related import RelatedSapErpAbapProgram, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -66,11 +62,11 @@ class SapErpFunctionModule(Asset): Instance of a SAP Function in Atlan. """ - SAP_ERP_FUNCTION_MODULE_GROUP: ClassVar[Any] = None + SAP_GROUP: ClassVar[Any] = None SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS: ClassVar[Any] = None - SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS_COUNT: ClassVar[Any] = None + SAP_IMPORT_PARAMS_COUNT: ClassVar[Any] = None SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS: ClassVar[Any] = None - SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS_COUNT: ClassVar[Any] = None + SAP_EXPORT_PARAMS_COUNT: ClassVar[Any] = None SAP_ERP_FUNCTION_EXCEPTION_LIST: ClassVar[Any] = None SAP_ERP_FUNCTION_EXCEPTION_LIST_COUNT: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None @@ -111,7 +107,9 @@ class SapErpFunctionModule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpFunctionModule" + + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -119,7 +117,7 @@ class SapErpFunctionModule(Asset): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Import Parameters in a given SAP ERP Function Module.""" sap_erp_function_module_export_params: Union[ @@ -127,7 +125,7 @@ class 
SapErpFunctionModule(Asset): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( @@ -260,66 +258,6 @@ class SapErpFunctionModule(Asset): def __post_init__(self) -> None: self.type_name = "SapErpFunctionModule" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpFunctionModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpFunctionModule validation failed: {errors}") - - def minimize(self) -> "SapErpFunctionModule": - """ - Return a minimal copy of this SapErpFunctionModule with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpFunctionModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpFunctionModule instance with only the minimum required fields. - """ - self.validate() - return SapErpFunctionModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpFunctionModule": - """ - Create a :class:`RelatedSapErpFunctionModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpFunctionModule reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSapErpFunctionModule(guid=self.guid) - return RelatedSapErpFunctionModule(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -377,7 +315,7 @@ def from_json( class SapErpFunctionModuleAttributes(AssetAttributes): """SapErpFunctionModule-specific attributes for nested API format.""" - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -385,7 +323,7 @@ class SapErpFunctionModuleAttributes(AssetAttributes): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Import Parameters in a given SAP ERP Function Module.""" sap_erp_function_module_export_params: 
Union[ @@ -393,7 +331,7 @@ class SapErpFunctionModuleAttributes(AssetAttributes): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( @@ -587,19 +525,15 @@ def _populate_sap_erp_function_module_attrs( ) -> None: """Populate SapErpFunctionModule-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_function_module_group = obj.sap_erp_function_module_group + attrs.sap_group = obj.sap_group attrs.sap_erp_function_module_import_params = ( obj.sap_erp_function_module_import_params ) - attrs.sap_erp_function_module_import_params_count = ( - obj.sap_erp_function_module_import_params_count - ) + attrs.sap_import_params_count = obj.sap_import_params_count attrs.sap_erp_function_module_export_params = ( obj.sap_erp_function_module_export_params ) - attrs.sap_erp_function_module_export_params_count = ( - obj.sap_erp_function_module_export_params_count - ) + attrs.sap_export_params_count = obj.sap_export_params_count attrs.sap_erp_function_exception_list = obj.sap_erp_function_exception_list attrs.sap_erp_function_exception_list_count = ( obj.sap_erp_function_exception_list_count @@ -618,19 +552,15 @@ def _extract_sap_erp_function_module_attrs( ) -> dict: """Extract all SapErpFunctionModule attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_function_module_group"] = attrs.sap_erp_function_module_group + result["sap_group"] = attrs.sap_group result["sap_erp_function_module_import_params"] = ( attrs.sap_erp_function_module_import_params ) - result["sap_erp_function_module_import_params_count"] = ( - 
attrs.sap_erp_function_module_import_params_count - ) + result["sap_import_params_count"] = attrs.sap_import_params_count result["sap_erp_function_module_export_params"] = ( attrs.sap_erp_function_module_export_params ) - result["sap_erp_function_module_export_params_count"] = ( - attrs.sap_erp_function_module_export_params_count - ) + result["sap_export_params_count"] = attrs.sap_export_params_count result["sap_erp_function_exception_list"] = attrs.sap_erp_function_exception_list result["sap_erp_function_exception_list_count"] = ( attrs.sap_erp_function_exception_list_count @@ -682,9 +612,6 @@ def _sap_erp_function_module_to_nested( is_incomplete=sap_erp_function_module.is_incomplete, provenance_type=sap_erp_function_module.provenance_type, home_id=sap_erp_function_module.home_id, - depth=sap_erp_function_module.depth, - immediate_upstream=sap_erp_function_module.immediate_upstream, - immediate_downstream=sap_erp_function_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -720,6 +647,7 @@ def _sap_erp_function_module_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -728,9 +656,6 @@ def _sap_erp_function_module_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_function_module_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -761,20 +686,18 @@ def _sap_erp_function_module_from_nested_bytes( RelationField, ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_GROUP = KeywordField( - "sapErpFunctionModuleGroup", "sapErpFunctionModuleGroup" -) 
+SapErpFunctionModule.SAP_GROUP = KeywordField("sapGroup", "sapGroup") SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS = KeywordField( "sapErpFunctionModuleImportParams", "sapErpFunctionModuleImportParams" ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS_COUNT = NumericField( - "sapErpFunctionModuleImportParamsCount", "sapErpFunctionModuleImportParamsCount" +SapErpFunctionModule.SAP_IMPORT_PARAMS_COUNT = NumericField( + "sapImportParamsCount", "sapImportParamsCount" ) SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS = KeywordField( "sapErpFunctionModuleExportParams", "sapErpFunctionModuleExportParams" ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS_COUNT = NumericField( - "sapErpFunctionModuleExportParamsCount", "sapErpFunctionModuleExportParamsCount" +SapErpFunctionModule.SAP_EXPORT_PARAMS_COUNT = NumericField( + "sapExportParamsCount", "sapExportParamsCount" ) SapErpFunctionModule.SAP_ERP_FUNCTION_EXCEPTION_LIST = KeywordField( "sapErpFunctionExceptionList", "sapErpFunctionExceptionList" diff --git a/pyatlan_v9/model/assets/sap_erp_table.py b/pyatlan_v9/model/assets/sap_erp_table.py index 2c443ba82..c613850a0 100644 --- a/pyatlan_v9/model/assets/sap_erp_table.py +++ b/pyatlan_v9/model/assets/sap_erp_table.py @@ -46,7 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent, RelatedSapErpTable +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -102,6 +102,8 @@ class SapErpTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpTable" + sap_erp_table_type: Union[str, 
None, UnsetType] = UNSET """Type of the SAP ERP table.""" @@ -230,66 +232,6 @@ class SapErpTable(Asset): def __post_init__(self) -> None: self.type_name = "SapErpTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpTable validation failed: {errors}") - - def minimize(self) -> "SapErpTable": - """ - Return a minimal copy of this SapErpTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpTable instance with only the minimum required fields. - """ - self.validate() - return SapErpTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpTable": - """ - Create a :class:`RelatedSapErpTable` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSapErpTable(guid=self.guid) - return RelatedSapErpTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -591,9 +533,6 @@ def _sap_erp_table_to_nested(sap_erp_table: SapErpTable) -> SapErpTableNested: is_incomplete=sap_erp_table.is_incomplete, provenance_type=sap_erp_table.provenance_type, home_id=sap_erp_table.home_id, - depth=sap_erp_table.depth, - immediate_upstream=sap_erp_table.immediate_upstream, - immediate_downstream=sap_erp_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -625,6 +564,7 @@ def _sap_erp_table_from_nested(nested: SapErpTableNested) -> SapErpTable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -633,9 +573,6 @@ def _sap_erp_table_from_nested(nested: SapErpTableNested) -> SapErpTable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap_erp_transaction_code.py b/pyatlan_v9/model/assets/sap_erp_transaction_code.py index b67870e5a..d3ab71154 100644 --- 
a/pyatlan_v9/model/assets/sap_erp_transaction_code.py +++ b/pyatlan_v9/model/assets/sap_erp_transaction_code.py @@ -46,11 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpAbapProgram, - RelatedSapErpComponent, - RelatedSapErpTransactionCode, -) +from .sap_related import RelatedSapErpAbapProgram, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -104,6 +100,8 @@ class SapErpTransactionCode(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpTransactionCode" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -226,66 +224,6 @@ class SapErpTransactionCode(Asset): def __post_init__(self) -> None: self.type_name = "SapErpTransactionCode" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpTransactionCode instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpTransactionCode validation failed: {errors}") - - def minimize(self) -> "SapErpTransactionCode": - """ - Return a minimal copy of this SapErpTransactionCode with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpTransactionCode with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpTransactionCode instance with only the minimum required fields. - """ - self.validate() - return SapErpTransactionCode(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpTransactionCode": - """ - Create a :class:`RelatedSapErpTransactionCode` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpTransactionCode reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpTransactionCode(guid=self.guid) - return RelatedSapErpTransactionCode(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -587,9 +525,6 @@ def _sap_erp_transaction_code_to_nested( is_incomplete=sap_erp_transaction_code.is_incomplete, provenance_type=sap_erp_transaction_code.provenance_type, home_id=sap_erp_transaction_code.home_id, - depth=sap_erp_transaction_code.depth, - immediate_upstream=sap_erp_transaction_code.immediate_upstream, - immediate_downstream=sap_erp_transaction_code.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -625,6 +560,7 @@ def _sap_erp_transaction_code_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -633,9 +569,6 @@ def _sap_erp_transaction_code_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_transaction_code_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sap_erp_view.py b/pyatlan_v9/model/assets/sap_erp_view.py index fe3107203..fba5d2e52 100644 --- a/pyatlan_v9/model/assets/sap_erp_view.py +++ b/pyatlan_v9/model/assets/sap_erp_view.py @@ -46,7 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from 
.sap_related import RelatedSapErpColumn, RelatedSapErpComponent, RelatedSapErpView +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -62,8 +62,8 @@ class SapErpView(Asset): Instance of a SAP table in Atlan. """ - SAP_ERP_VIEW_TYPE: ClassVar[Any] = None - SAP_ERP_VIEW_DEFINITION: ClassVar[Any] = None + SAP_TYPE: ClassVar[Any] = None + SAP_DEFINITION: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None @@ -102,10 +102,12 @@ class SapErpView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpView" + + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the definition of the SAP ERP View""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -230,66 +232,6 @@ class SapErpView(Asset): def __post_init__(self) -> None: self.type_name = "SapErpView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpView validation failed: {errors}") - - def minimize(self) -> "SapErpView": - """ - Return a minimal copy of this SapErpView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpView instance with only the minimum required fields. - """ - self.validate() - return SapErpView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpView": - """ - Create a :class:`RelatedSapErpView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpView(guid=self.guid) - return RelatedSapErpView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -345,10 +287,10 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpView: class SapErpViewAttributes(AssetAttributes): """SapErpView-specific attributes for nested API format.""" - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the definition of the SAP ERP View""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -530,8 +472,8 @@ class SapErpViewNested(AssetNested): def _populate_sap_erp_view_attrs(attrs: SapErpViewAttributes, obj: SapErpView) -> None: """Populate SapErpView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_view_type = obj.sap_erp_view_type - attrs.sap_erp_view_definition = obj.sap_erp_view_definition + attrs.sap_type = obj.sap_type + attrs.sap_definition = obj.sap_definition attrs.sap_technical_name = obj.sap_technical_name attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name @@ -544,8 +486,8 @@ def _populate_sap_erp_view_attrs(attrs: SapErpViewAttributes, obj: SapErpView) - def _extract_sap_erp_view_attrs(attrs: SapErpViewAttributes) -> dict: """Extract all SapErpView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_view_type"] = attrs.sap_erp_view_type - result["sap_erp_view_definition"] = attrs.sap_erp_view_definition + result["sap_type"] = attrs.sap_type + result["sap_definition"] = 
attrs.sap_definition result["sap_technical_name"] = attrs.sap_technical_name result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name @@ -589,9 +531,6 @@ def _sap_erp_view_to_nested(sap_erp_view: SapErpView) -> SapErpViewNested: is_incomplete=sap_erp_view.is_incomplete, provenance_type=sap_erp_view.provenance_type, home_id=sap_erp_view.home_id, - depth=sap_erp_view.depth, - immediate_upstream=sap_erp_view.immediate_upstream, - immediate_downstream=sap_erp_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -623,6 +562,7 @@ def _sap_erp_view_from_nested(nested: SapErpViewNested) -> SapErpView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -631,9 +571,6 @@ def _sap_erp_view_from_nested(nested: SapErpViewNested) -> SapErpView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -660,10 +597,8 @@ def _sap_erp_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpView: RelationField, ) -SapErpView.SAP_ERP_VIEW_TYPE = KeywordField("sapErpViewType", "sapErpViewType") -SapErpView.SAP_ERP_VIEW_DEFINITION = KeywordField( - "sapErpViewDefinition", "sapErpViewDefinition" -) +SapErpView.SAP_TYPE = KeywordField("sapType", "sapType") +SapErpView.SAP_DEFINITION = KeywordField("sapDefinition", "sapDefinition") SapErpView.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") SapErpView.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") 
SapErpView.SAP_PACKAGE_NAME = KeywordField("sapPackageName", "sapPackageName") diff --git a/pyatlan_v9/model/assets/sap_related.py b/pyatlan_v9/model/assets/sap_related.py index d6cb164bd..d7b6d070a 100644 --- a/pyatlan_v9/model/assets/sap_related.py +++ b/pyatlan_v9/model/assets/sap_related.py @@ -92,10 +92,10 @@ class RelatedSapErpView(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpView" so it serializes correctly - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the definition of the SAP ERP View""" def __post_init__(self) -> None: @@ -113,13 +113,13 @@ class RelatedSapErpCdsView(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpCdsView" so it serializes correctly - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET + sap_technical_name: Union[str, None, UnsetType] = UNSET """The technical database view name of the SAP ERP CDS View.""" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" def __post_init__(self) -> None: @@ -137,25 +137,25 @@ class RelatedSapErpColumn(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpColumn" so it serializes correctly - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, providing semantic information about the column.""" - 
sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: Union[str, None, UnsetType] = UNSET @@ -176,10 +176,10 @@ class RelatedSapErpColumn(RelatedSAP): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this column.""" def __post_init__(self) -> None: 
@@ -212,7 +212,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpFunctionModule" so it serializes correctly - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -220,7 +220,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Import Parameters in a given SAP ERP Function Module.""" sap_erp_function_module_export_params: Union[ @@ -228,7 +228,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( diff --git a/pyatlan_v9/model/assets/schema.py b/pyatlan_v9/model/assets/schema.py index 48066d53f..281941824 100644 --- a/pyatlan_v9/model/assets/schema.py +++ b/pyatlan_v9/model/assets/schema.py @@ -75,7 +75,6 @@ RelatedFunction, RelatedMaterialisedView, RelatedProcedure, - RelatedSchema, RelatedTable, RelatedView, ) @@ -92,7 +91,7 @@ class Schema(Asset): """ TABLE_COUNT: ClassVar[Any] = None - SCHEMA_EXTERNAL_LOCATION: ClassVar[Any] = None + SQL_EXTERNAL_LOCATION: ClassVar[Any] = None VIEWS_COUNT: ClassVar[Any] = None LINKED_SCHEMA_QUALIFIED_NAME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None @@ -165,10 +164,12 @@ 
class Schema(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Schema" + table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -415,76 +416,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Schema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.database is UNSET: - errors.append("database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Schema validation failed: {errors}") - - def minimize(self) -> "Schema": - """ - Return a minimal copy of this Schema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Schema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Schema instance with only the minimum required fields. - """ - self.validate() - return Schema(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchema": - """ - Create a :class:`RelatedSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchema(guid=self.guid) - return RelatedSchema(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -635,7 +566,7 @@ class SchemaAttributes(AssetAttributes): table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -955,7 +886,7 @@ def _populate_schema__attrs(attrs: SchemaAttributes, obj: Schema) -> None: """Populate Schema-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.table_count = obj.table_count - attrs.schema_external_location = obj.schema_external_location + attrs.sql_external_location = obj.sql_external_location attrs.views_count = obj.views_count attrs.linked_schema_qualified_name = obj.linked_schema_qualified_name attrs.query_count = obj.query_count @@ -982,7 +913,7 @@ def _extract_schema__attrs(attrs: SchemaAttributes) -> dict: """Extract all Schema attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["table_count"] = attrs.table_count - result["schema_external_location"] = attrs.schema_external_location + result["sql_external_location"] = attrs.sql_external_location result["views_count"] = attrs.views_count result["linked_schema_qualified_name"] = attrs.linked_schema_qualified_name result["query_count"] = attrs.query_count @@ -1041,9 +972,6 @@ def _schema__to_nested(schema_: Schema) -> SchemaNested: is_incomplete=schema_.is_incomplete, provenance_type=schema_.provenance_type, home_id=schema_.home_id, - depth=schema_.depth, - immediate_upstream=schema_.immediate_upstream, - immediate_downstream=schema_.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, 
@@ -1073,6 +1001,7 @@ def _schema__from_nested(nested: SchemaNested) -> Schema: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1081,9 +1010,6 @@ def _schema__from_nested(nested: SchemaNested) -> Schema: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema__attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1112,8 +1038,8 @@ def _schema__from_nested_bytes(data: bytes, serde: Serde) -> Schema: ) Schema.TABLE_COUNT = NumericField("tableCount", "tableCount") -Schema.SCHEMA_EXTERNAL_LOCATION = KeywordField( - "schemaExternalLocation", "schemaExternalLocation" +Schema.SQL_EXTERNAL_LOCATION = KeywordField( + "sqlExternalLocation", "sqlExternalLocation" ) Schema.VIEWS_COUNT = NumericField("viewsCount", "viewsCount") Schema.LINKED_SCHEMA_QUALIFIED_NAME = KeywordField( diff --git a/pyatlan_v9/model/assets/schema_registry.py b/pyatlan_v9/model/assets/schema_registry.py index f9820633e..03d720cb5 100644 --- a/pyatlan_v9/model/assets/schema_registry.py +++ b/pyatlan_v9/model/assets/schema_registry.py @@ -46,7 +46,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .schema_registry_related import RelatedSchemaRegistry, RelatedSchemaRegistrySubject +from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -92,6 +92,8 @@ class SchemaRegistry(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "SchemaRegistry" + schema_registry_schema_type: Union[str, None, UnsetType] = UNSET """Type of language or specification used to define the schema, for example: JSON, Protobuf, etc.""" @@ -193,66 +195,6 @@ class SchemaRegistry(Asset): def __post_init__(self) -> None: self.type_name = "SchemaRegistry" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SchemaRegistry instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SchemaRegistry validation failed: {errors}") - - def minimize(self) -> "SchemaRegistry": - """ - Return a minimal copy of this SchemaRegistry with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SchemaRegistry with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SchemaRegistry instance with only the minimum required fields. 
- """ - self.validate() - return SchemaRegistry(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchemaRegistry": - """ - Create a :class:`RelatedSchemaRegistry` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchemaRegistry reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSchemaRegistry(guid=self.guid) - return RelatedSchemaRegistry(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -515,9 +457,6 @@ def _schema_registry_to_nested(schema_registry: SchemaRegistry) -> SchemaRegistr is_incomplete=schema_registry.is_incomplete, provenance_type=schema_registry.provenance_type, home_id=schema_registry.home_id, - depth=schema_registry.depth, - immediate_upstream=schema_registry.immediate_upstream, - immediate_downstream=schema_registry.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -551,6 +490,7 @@ def _schema_registry_from_nested(nested: SchemaRegistryNested) -> SchemaRegistry updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -559,9 +499,6 @@ def _schema_registry_from_nested(nested: SchemaRegistryNested) -> SchemaRegistry is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_schema_registry_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/schema_registry_subject.py b/pyatlan_v9/model/assets/schema_registry_subject.py index c0651a06b..28b42a7b5 100644 --- a/pyatlan_v9/model/assets/schema_registry_subject.py +++ b/pyatlan_v9/model/assets/schema_registry_subject.py @@ -100,6 +100,8 @@ class SchemaRegistrySubject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SchemaRegistrySubject" + schema_registry_subject_base_name: Union[str, None, UnsetType] = UNSET """Base name of the subject, without -key, -value prefixes.""" @@ -226,66 +228,6 @@ class SchemaRegistrySubject(Asset): def __post_init__(self) -> None: self.type_name = "SchemaRegistrySubject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SchemaRegistrySubject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SchemaRegistrySubject validation failed: {errors}") - - def minimize(self) -> "SchemaRegistrySubject": - """ - Return a minimal copy of this SchemaRegistrySubject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SchemaRegistrySubject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SchemaRegistrySubject instance with only the minimum required fields. - """ - self.validate() - return SchemaRegistrySubject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchemaRegistrySubject": - """ - Create a :class:`RelatedSchemaRegistrySubject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchemaRegistrySubject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchemaRegistrySubject(guid=self.guid) - return RelatedSchemaRegistrySubject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -614,9 +556,6 @@ def _schema_registry_subject_to_nested( is_incomplete=schema_registry_subject.is_incomplete, provenance_type=schema_registry_subject.provenance_type, home_id=schema_registry_subject.home_id, - depth=schema_registry_subject.depth, - immediate_upstream=schema_registry_subject.immediate_upstream, - immediate_downstream=schema_registry_subject.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -652,6 +591,7 @@ def _schema_registry_subject_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -660,9 +600,6 @@ def _schema_registry_subject_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema_registry_subject_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic.py b/pyatlan_v9/model/assets/semantic.py index 323cf52d2..f09646d05 100644 --- a/pyatlan_v9/model/assets/semantic.py +++ b/pyatlan_v9/model/assets/semantic.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from 
.semantic_related import RelatedSemantic from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -91,6 +90,8 @@ class Semantic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Semantic" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Semantic(Asset): def __post_init__(self) -> None: self.type_name = "Semantic" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Semantic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Semantic validation failed: {errors}") - - def minimize(self) -> "Semantic": - """ - Return a minimal copy of this Semantic with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Semantic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Semantic instance with only the minimum required fields. - """ - self.validate() - return Semantic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemantic": - """ - Create a :class:`RelatedSemantic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemantic reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSemantic(guid=self.guid) - return RelatedSemantic(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _semantic_to_nested(semantic: Semantic) -> SemanticNested: is_incomplete=semantic.is_incomplete, provenance_type=semantic.provenance_type, home_id=semantic.home_id, - depth=semantic.depth, - immediate_upstream=semantic.immediate_upstream, - immediate_downstream=semantic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -527,6 +465,7 @@ def _semantic_from_nested(nested: SemanticNested) -> Semantic: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -535,9 +474,6 @@ def _semantic_from_nested(nested: SemanticNested) -> Semantic: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic_dimension.py b/pyatlan_v9/model/assets/semantic_dimension.py index d242fed1b..8193e90e1 100644 --- a/pyatlan_v9/model/assets/semantic_dimension.py +++ b/pyatlan_v9/model/assets/semantic_dimension.py @@ -48,7 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticDimension, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -100,6 +100,8 @@ class SemanticDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticDimension" + semantic_expression: Union[str, None, UnsetType] = UNSET """Column name or SQL expression for the semantic field.""" @@ -225,72 +227,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticDimension validation failed: {errors}") - - def minimize(self) -> "SemanticDimension": - """ - Return a minimal copy of this SemanticDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticDimension instance with only the minimum required fields. - """ - self.validate() - return SemanticDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticDimension": - """ - Create a :class:`RelatedSemanticDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticDimension(guid=self.guid) - return RelatedSemanticDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -586,9 +522,6 @@ def _semantic_dimension_to_nested( is_incomplete=semantic_dimension.is_incomplete, provenance_type=semantic_dimension.provenance_type, home_id=semantic_dimension.home_id, - depth=semantic_dimension.depth, - immediate_upstream=semantic_dimension.immediate_upstream, - immediate_downstream=semantic_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +557,7 @@ def _semantic_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +566,6 @@ def _semantic_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic_entity.py b/pyatlan_v9/model/assets/semantic_entity.py index 98af650fa..33ccf1ed1 100644 --- a/pyatlan_v9/model/assets/semantic_entity.py +++ b/pyatlan_v9/model/assets/semantic_entity.py @@ -48,7 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import 
RelatedSemanticEntity, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -100,6 +100,8 @@ class SemanticEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticEntity" + semantic_expression: Union[str, None, UnsetType] = UNSET """Column name or SQL expression for the semantic field.""" @@ -225,72 +227,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticEntity validation failed: {errors}") - - def minimize(self) -> "SemanticEntity": - """ - Return a minimal copy of this SemanticEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticEntity instance with only the minimum required fields. - """ - self.validate() - return SemanticEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticEntity": - """ - Create a :class:`RelatedSemanticEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticEntity(guid=self.guid) - return RelatedSemanticEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -582,9 +518,6 @@ def _semantic_entity_to_nested(semantic_entity: SemanticEntity) -> SemanticEntit is_incomplete=semantic_entity.is_incomplete, provenance_type=semantic_entity.provenance_type, home_id=semantic_entity.home_id, - depth=semantic_entity.depth, - immediate_upstream=semantic_entity.immediate_upstream, - immediate_downstream=semantic_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +551,7 @@ def _semantic_entity_from_nested(nested: SemanticEntityNested) -> SemanticEntity updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +560,6 @@ def _semantic_entity_from_nested(nested: SemanticEntityNested) -> SemanticEntity is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_entity_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic_field.py b/pyatlan_v9/model/assets/semantic_field.py index d0cca4e94..e834216bb 100644 --- a/pyatlan_v9/model/assets/semantic_field.py +++ b/pyatlan_v9/model/assets/semantic_field.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from 
.schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticField from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -98,6 +97,8 @@ class SemanticField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticField" + semantic_expression: Union[str, None, UnsetType] = UNSET """Column name or SQL expression for the semantic field.""" @@ -214,66 +215,6 @@ class SemanticField(Asset): def __post_init__(self) -> None: self.type_name = "SemanticField" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SemanticField validation failed: {errors}") - - def minimize(self) -> "SemanticField": - """ - Return a minimal copy of this SemanticField with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticField instance with only the minimum required fields. - """ - self.validate() - return SemanticField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticField": - """ - Create a :class:`RelatedSemanticField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSemanticField(guid=self.guid) - return RelatedSemanticField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -559,9 +500,6 @@ def _semantic_field_to_nested(semantic_field: SemanticField) -> SemanticFieldNes is_incomplete=semantic_field.is_incomplete, provenance_type=semantic_field.provenance_type, home_id=semantic_field.home_id, - depth=semantic_field.depth, - immediate_upstream=semantic_field.immediate_upstream, - immediate_downstream=semantic_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -595,6 +533,7 @@ def _semantic_field_from_nested(nested: SemanticFieldNested) -> SemanticField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -603,9 +542,6 @@ def _semantic_field_from_nested(nested: 
SemanticFieldNested) -> SemanticField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic_measure.py b/pyatlan_v9/model/assets/semantic_measure.py index a6f659d68..aee033617 100644 --- a/pyatlan_v9/model/assets/semantic_measure.py +++ b/pyatlan_v9/model/assets/semantic_measure.py @@ -48,7 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticMeasure, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -100,6 +100,8 @@ class SemanticMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticMeasure" + semantic_expression: Union[str, None, UnsetType] = UNSET """Column name or SQL expression for the semantic field.""" @@ -225,72 +227,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticMeasure validation failed: {errors}") - - def minimize(self) -> "SemanticMeasure": - """ - Return a minimal copy of this SemanticMeasure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticMeasure instance with only the minimum required fields. - """ - self.validate() - return SemanticMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticMeasure": - """ - Create a :class:`RelatedSemanticMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticMeasure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticMeasure(guid=self.guid) - return RelatedSemanticMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -586,9 +522,6 @@ def _semantic_measure_to_nested( is_incomplete=semantic_measure.is_incomplete, provenance_type=semantic_measure.provenance_type, home_id=semantic_measure.home_id, - depth=semantic_measure.depth, - immediate_upstream=semantic_measure.immediate_upstream, - immediate_downstream=semantic_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -622,6 +555,7 @@ def _semantic_measure_from_nested(nested: SemanticMeasureNested) -> SemanticMeas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -630,9 +564,6 @@ def _semantic_measure_from_nested(nested: SemanticMeasureNested) -> SemanticMeas is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_measure_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/semantic_model.py b/pyatlan_v9/model/assets/semantic_model.py index 552826b6a..a0c92d3b8 100644 --- a/pyatlan_v9/model/assets/semantic_model.py +++ b/pyatlan_v9/model/assets/semantic_model.py @@ -51,7 +51,6 @@ RelatedSemanticDimension, RelatedSemanticEntity, RelatedSemanticMeasure, - RelatedSemanticModel, ) from .soda_related import RelatedSodaCheck from .spark_related import 
RelatedSparkJob @@ -99,6 +98,8 @@ class SemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticModel" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -203,66 +204,6 @@ class SemanticModel(Asset): def __post_init__(self) -> None: self.type_name = "SemanticModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SemanticModel validation failed: {errors}") - - def minimize(self) -> "SemanticModel": - """ - Return a minimal copy of this SemanticModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new SemanticModel instance with only the minimum required fields. - """ - self.validate() - return SemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticModel": - """ - Create a :class:`RelatedSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticModel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSemanticModel(guid=self.guid) - return RelatedSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -526,9 +467,6 @@ def _semantic_model_to_nested(semantic_model: SemanticModel) -> SemanticModelNes is_incomplete=semantic_model.is_incomplete, provenance_type=semantic_model.provenance_type, home_id=semantic_model.home_id, - depth=semantic_model.depth, - immediate_upstream=semantic_model.immediate_upstream, - immediate_downstream=semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -562,6 +500,7 @@ def _semantic_model_from_nested(nested: SemanticModelNested) -> SemanticModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -570,9 +509,6 @@ def _semantic_model_from_nested(nested: SemanticModelNested) -> SemanticModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sigma.py b/pyatlan_v9/model/assets/sigma.py index ad2f951e3..8e2f19eba 100644 --- a/pyatlan_v9/model/assets/sigma.py +++ b/pyatlan_v9/model/assets/sigma.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigma from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -97,6 +96,8 @@ class Sigma(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Sigma" + sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workbook in which this asset exists.""" @@ -210,66 +211,6 @@ class Sigma(Asset): def __post_init__(self) -> None: self.type_name = "Sigma" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Sigma instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Sigma validation failed: {errors}") - - def minimize(self) -> "Sigma": - """ - Return a minimal copy of this Sigma with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Sigma with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Sigma instance with only the minimum required fields. - """ - self.validate() - return Sigma(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigma": - """ - Create a :class:`RelatedSigma` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigma reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigma(guid=self.guid) - return RelatedSigma(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -548,9 +489,6 @@ def _sigma_to_nested(sigma: Sigma) -> SigmaNested: is_incomplete=sigma.is_incomplete, provenance_type=sigma.provenance_type, home_id=sigma.home_id, - depth=sigma.depth, - immediate_upstream=sigma.immediate_upstream, - immediate_downstream=sigma.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -580,6 +518,7 @@ def _sigma_from_nested(nested: SigmaNested) -> Sigma: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -588,9 +527,6 @@ def _sigma_from_nested(nested: SigmaNested) -> Sigma: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sigma_data_element.py b/pyatlan_v9/model/assets/sigma_data_element.py index df29db89b..3ea98fbb4 100644 --- a/pyatlan_v9/model/assets/sigma_data_element.py +++ b/pyatlan_v9/model/assets/sigma_data_element.py @@ -48,11 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import ( - RelatedSigmaDataElement, - RelatedSigmaDataElementField, - 
RelatedSigmaPage, -) +from .sigma_related import RelatedSigmaDataElementField, RelatedSigmaPage from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -107,6 +103,8 @@ class SigmaDataElement(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaDataElement" + sigma_data_element_query: Union[str, None, UnsetType] = UNSET """""" @@ -245,80 +243,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataElement instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_page is UNSET: - errors.append("sigma_page is required for creation") - if self.sigma_page_name is UNSET: - errors.append("sigma_page_name is required for creation") - if self.sigma_page_qualified_name is UNSET: - errors.append("sigma_page_qualified_name is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaDataElement validation failed: {errors}") - - def minimize(self) -> "SigmaDataElement": - """ - Return a minimal copy of this SigmaDataElement with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataElement with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDataElement instance with only the minimum required fields. - """ - self.validate() - return SigmaDataElement(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataElement": - """ - Create a :class:`RelatedSigmaDataElement` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataElement reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDataElement(guid=self.guid) - return RelatedSigmaDataElement(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -632,9 +556,6 @@ def _sigma_data_element_to_nested( is_incomplete=sigma_data_element.is_incomplete, provenance_type=sigma_data_element.provenance_type, home_id=sigma_data_element.home_id, - depth=sigma_data_element.depth, - immediate_upstream=sigma_data_element.immediate_upstream, - immediate_downstream=sigma_data_element.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -668,6 +589,7 @@ def _sigma_data_element_from_nested(nested: SigmaDataElementNested) -> SigmaData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -676,9 +598,6 @@ def _sigma_data_element_from_nested(nested: SigmaDataElementNested) -> SigmaData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_data_element_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sigma_data_element_field.py b/pyatlan_v9/model/assets/sigma_data_element_field.py index 66324dd64..e1e29ad73 100644 --- a/pyatlan_v9/model/assets/sigma_data_element_field.py +++ 
b/pyatlan_v9/model/assets/sigma_data_element_field.py @@ -48,7 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaDataElement, RelatedSigmaDataElementField +from .sigma_related import RelatedSigmaDataElement from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -63,7 +63,7 @@ class SigmaDataElementField(Asset): Instance of a Sigma data element field in Atlan. """ - SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[Any] = None + SIGMA_IS_HIDDEN: ClassVar[Any] = None SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[Any] = None SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[Any] = None SIGMA_WORKBOOK_NAME: ClassVar[Any] = None @@ -101,7 +101,9 @@ class SigmaDataElementField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sigma_data_element_field_is_hidden: Union[bool, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SigmaDataElementField" + + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -231,86 +233,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataElementField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_data_element is UNSET: - errors.append("sigma_data_element is required for creation") - if self.sigma_data_element_name is UNSET: - errors.append("sigma_data_element_name is required for creation") - if self.sigma_data_element_qualified_name is UNSET: - errors.append( - "sigma_data_element_qualified_name is required for creation" - ) - if self.sigma_page_name is UNSET: - errors.append("sigma_page_name is required for creation") - if self.sigma_page_qualified_name is UNSET: - errors.append("sigma_page_qualified_name is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaDataElementField validation failed: {errors}") - - def minimize(self) -> "SigmaDataElementField": - """ - Return a minimal copy of this SigmaDataElementField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataElementField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new SigmaDataElementField instance with only the minimum required fields. - """ - self.validate() - return SigmaDataElementField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataElementField": - """ - Create a :class:`RelatedSigmaDataElementField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataElementField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDataElementField(guid=self.guid) - return RelatedSigmaDataElementField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -368,7 +290,7 @@ def from_json( class SigmaDataElementFieldAttributes(AssetAttributes): """SigmaDataElementField-specific attributes for nested API format.""" - sigma_data_element_field_is_hidden: Union[bool, None, UnsetType] = UNSET + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -550,7 +472,7 @@ def _populate_sigma_data_element_field_attrs( ) -> None: """Populate SigmaDataElementField-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sigma_data_element_field_is_hidden = obj.sigma_data_element_field_is_hidden + attrs.sigma_is_hidden = obj.sigma_is_hidden attrs.sigma_data_element_field_formula = obj.sigma_data_element_field_formula attrs.sigma_workbook_qualified_name = obj.sigma_workbook_qualified_name attrs.sigma_workbook_name = obj.sigma_workbook_name @@ -565,9 +487,7 @@ def _extract_sigma_data_element_field_attrs( ) -> dict: """Extract all SigmaDataElementField 
attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sigma_data_element_field_is_hidden"] = ( - attrs.sigma_data_element_field_is_hidden - ) + result["sigma_is_hidden"] = attrs.sigma_is_hidden result["sigma_data_element_field_formula"] = attrs.sigma_data_element_field_formula result["sigma_workbook_qualified_name"] = attrs.sigma_workbook_qualified_name result["sigma_workbook_name"] = attrs.sigma_workbook_name @@ -617,9 +537,6 @@ def _sigma_data_element_field_to_nested( is_incomplete=sigma_data_element_field.is_incomplete, provenance_type=sigma_data_element_field.provenance_type, home_id=sigma_data_element_field.home_id, - depth=sigma_data_element_field.depth, - immediate_upstream=sigma_data_element_field.immediate_upstream, - immediate_downstream=sigma_data_element_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -655,6 +572,7 @@ def _sigma_data_element_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -663,9 +581,6 @@ def _sigma_data_element_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_data_element_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -697,9 +612,7 @@ def _sigma_data_element_field_from_nested_bytes( RelationField, ) -SigmaDataElementField.SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN = BooleanField( - "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" -) +SigmaDataElementField.SIGMA_IS_HIDDEN = BooleanField("sigmaIsHidden", "sigmaIsHidden") 
SigmaDataElementField.SIGMA_DATA_ELEMENT_FIELD_FORMULA = KeywordField( "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" ) diff --git a/pyatlan_v9/model/assets/sigma_dataset.py b/pyatlan_v9/model/assets/sigma_dataset.py index 7581a328b..27768ebc7 100644 --- a/pyatlan_v9/model/assets/sigma_dataset.py +++ b/pyatlan_v9/model/assets/sigma_dataset.py @@ -47,7 +47,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaDataset, RelatedSigmaDatasetColumn +from .sigma_related import RelatedSigmaDatasetColumn from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -62,7 +62,7 @@ class SigmaDataset(Asset): Instance of a Sigma dataset in Atlan. """ - SIGMA_DATASET_COLUMN_COUNT: ClassVar[Any] = None + SIGMA_COLUMN_COUNT: ClassVar[Any] = None SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[Any] = None SIGMA_WORKBOOK_NAME: ClassVar[Any] = None SIGMA_PAGE_QUALIFIED_NAME: ClassVar[Any] = None @@ -99,7 +99,9 @@ class SigmaDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SigmaDataset" + + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this dataset.""" sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET @@ -220,66 +222,6 @@ class SigmaDataset(Asset): def __post_init__(self) -> None: self.type_name = "SigmaDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataset instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SigmaDataset validation failed: {errors}") - - def minimize(self) -> "SigmaDataset": - """ - Return a minimal copy of this SigmaDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDataset instance with only the minimum required fields. - """ - self.validate() - return SigmaDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataset": - """ - Create a :class:`RelatedSigmaDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigmaDataset(guid=self.guid) - return RelatedSigmaDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -335,7 +277,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SigmaDatase class SigmaDatasetAttributes(AssetAttributes): """SigmaDataset-specific attributes for nested API format.""" - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this dataset.""" sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET @@ -516,7 +458,7 @@ def _populate_sigma_dataset_attrs( ) -> None: """Populate SigmaDataset-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sigma_dataset_column_count = obj.sigma_dataset_column_count + attrs.sigma_column_count = obj.sigma_column_count attrs.sigma_workbook_qualified_name = obj.sigma_workbook_qualified_name attrs.sigma_workbook_name = obj.sigma_workbook_name attrs.sigma_page_qualified_name = obj.sigma_page_qualified_name @@ -528,7 +470,7 @@ def _populate_sigma_dataset_attrs( def _extract_sigma_dataset_attrs(attrs: SigmaDatasetAttributes) -> dict: """Extract all SigmaDataset attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sigma_dataset_column_count"] = attrs.sigma_dataset_column_count + result["sigma_column_count"] = attrs.sigma_column_count result["sigma_workbook_qualified_name"] = attrs.sigma_workbook_qualified_name result["sigma_workbook_name"] = attrs.sigma_workbook_name result["sigma_page_qualified_name"] = attrs.sigma_page_qualified_name @@ -573,9 +515,6 @@ def _sigma_dataset_to_nested(sigma_dataset: SigmaDataset) -> SigmaDatasetNested: 
is_incomplete=sigma_dataset.is_incomplete, provenance_type=sigma_dataset.provenance_type, home_id=sigma_dataset.home_id, - depth=sigma_dataset.depth, - immediate_upstream=sigma_dataset.immediate_upstream, - immediate_downstream=sigma_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -609,6 +548,7 @@ def _sigma_dataset_from_nested(nested: SigmaDatasetNested) -> SigmaDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -617,9 +557,6 @@ def _sigma_dataset_from_nested(nested: SigmaDatasetNested) -> SigmaDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -647,9 +584,7 @@ def _sigma_dataset_from_nested_bytes(data: bytes, serde: Serde) -> SigmaDataset: RelationField, ) -SigmaDataset.SIGMA_DATASET_COLUMN_COUNT = NumericField( - "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" -) +SigmaDataset.SIGMA_COLUMN_COUNT = NumericField("sigmaColumnCount", "sigmaColumnCount") SigmaDataset.SIGMA_WORKBOOK_QUALIFIED_NAME = KeywordTextField( "sigmaWorkbookQualifiedName", "sigmaWorkbookQualifiedName", diff --git a/pyatlan_v9/model/assets/sigma_dataset_column.py b/pyatlan_v9/model/assets/sigma_dataset_column.py index ba960295c..ee58d0e48 100644 --- a/pyatlan_v9/model/assets/sigma_dataset_column.py +++ b/pyatlan_v9/model/assets/sigma_dataset_column.py @@ -48,7 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related 
import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaDataset, RelatedSigmaDatasetColumn +from .sigma_related import RelatedSigmaDataset from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -101,6 +101,8 @@ class SigmaDatasetColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaDatasetColumn" + sigma_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the dataset in which this column exists.""" @@ -229,76 +231,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_dataset is UNSET: - errors.append("sigma_dataset is required for creation") - if self.sigma_dataset_name is UNSET: - errors.append("sigma_dataset_name is required for creation") - if self.sigma_dataset_qualified_name is UNSET: - errors.append("sigma_dataset_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaDatasetColumn validation failed: {errors}") - - def minimize(self) -> "SigmaDatasetColumn": - """ - Return a minimal copy of this SigmaDatasetColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDatasetColumn instance with only the minimum required fields. - """ - self.validate() - return SigmaDatasetColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDatasetColumn": - """ - Create a :class:`RelatedSigmaDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDatasetColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigmaDatasetColumn(guid=self.guid) - return RelatedSigmaDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -601,9 +533,6 @@ def _sigma_dataset_column_to_nested( is_incomplete=sigma_dataset_column.is_incomplete, provenance_type=sigma_dataset_column.provenance_type, home_id=sigma_dataset_column.home_id, - depth=sigma_dataset_column.depth, - immediate_upstream=sigma_dataset_column.immediate_upstream, - immediate_downstream=sigma_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -639,6 +568,7 @@ def _sigma_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -647,9 +577,6 @@ def _sigma_dataset_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sigma_page.py b/pyatlan_v9/model/assets/sigma_page.py index 0640b3b19..bc56ae85d 100644 --- a/pyatlan_v9/model/assets/sigma_page.py +++ b/pyatlan_v9/model/assets/sigma_page.py @@ -48,11 +48,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import ( - 
RelatedSigmaDataElement, - RelatedSigmaPage, - RelatedSigmaWorkbook, -) +from .sigma_related import RelatedSigmaDataElement, RelatedSigmaWorkbook from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -105,6 +101,8 @@ class SigmaPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaPage" + sigma_data_element_count: Union[int, None, UnsetType] = UNSET """Number of data elements on this page.""" @@ -233,76 +231,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_workbook is UNSET: - errors.append("sigma_workbook is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaPage validation failed: {errors}") - - def minimize(self) -> "SigmaPage": - """ - Return a minimal copy of this SigmaPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaPage instance with only the minimum required fields. - """ - self.validate() - return SigmaPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaPage": - """ - Create a :class:`RelatedSigmaPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigmaPage(guid=self.guid) - return RelatedSigmaPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -594,9 +522,6 @@ def _sigma_page_to_nested(sigma_page: SigmaPage) -> SigmaPageNested: is_incomplete=sigma_page.is_incomplete, provenance_type=sigma_page.provenance_type, home_id=sigma_page.home_id, - depth=sigma_page.depth, - immediate_upstream=sigma_page.immediate_upstream, - immediate_downstream=sigma_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -628,6 +553,7 @@ def _sigma_page_from_nested(nested: SigmaPageNested) -> SigmaPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -636,9 +562,6 @@ def _sigma_page_from_nested(nested: SigmaPageNested) -> SigmaPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_page_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sigma_related.py b/pyatlan_v9/model/assets/sigma_related.py index 57a58a497..62c3e8fca 100644 --- a/pyatlan_v9/model/assets/sigma_related.py +++ b/pyatlan_v9/model/assets/sigma_related.py @@ -96,7 +96,7 @@ class RelatedSigmaDataElementField(RelatedSigma): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SigmaDataElementField" so it serializes correctly - sigma_data_element_field_is_hidden: 
Union[bool, None, UnsetType] = UNSET + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -117,7 +117,7 @@ class RelatedSigmaDataset(RelatedSigma): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SigmaDataset" so it serializes correctly - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this dataset.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/sigma_workbook.py b/pyatlan_v9/model/assets/sigma_workbook.py index 17475b8f9..3bb3a07da 100644 --- a/pyatlan_v9/model/assets/sigma_workbook.py +++ b/pyatlan_v9/model/assets/sigma_workbook.py @@ -47,7 +47,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaPage, RelatedSigmaWorkbook +from .sigma_related import RelatedSigmaPage from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -99,6 +99,8 @@ class SigmaWorkbook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaWorkbook" + sigma_page_count: Union[int, None, UnsetType] = UNSET """Number of pages in this workbook.""" @@ -218,66 +220,6 @@ class SigmaWorkbook(Asset): def __post_init__(self) -> None: self.type_name = "SigmaWorkbook" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaWorkbook instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SigmaWorkbook validation failed: {errors}") - - def minimize(self) -> "SigmaWorkbook": - """ - Return a minimal copy of this SigmaWorkbook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaWorkbook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaWorkbook instance with only the minimum required fields. - """ - self.validate() - return SigmaWorkbook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaWorkbook": - """ - Create a :class:`RelatedSigmaWorkbook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaWorkbook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigmaWorkbook(guid=self.guid) - return RelatedSigmaWorkbook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -569,9 +511,6 @@ def _sigma_workbook_to_nested(sigma_workbook: SigmaWorkbook) -> SigmaWorkbookNes is_incomplete=sigma_workbook.is_incomplete, provenance_type=sigma_workbook.provenance_type, home_id=sigma_workbook.home_id, - depth=sigma_workbook.depth, - immediate_upstream=sigma_workbook.immediate_upstream, - immediate_downstream=sigma_workbook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -605,6 +544,7 @@ def _sigma_workbook_from_nested(nested: SigmaWorkbookNested) -> SigmaWorkbook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -613,9 +553,6 @@ def _sigma_workbook_from_nested(nested: SigmaWorkbookNested) -> SigmaWorkbook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_workbook_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sisense.py b/pyatlan_v9/model/assets/sisense.py index 118d1a733..46b95d91a 100644 --- a/pyatlan_v9/model/assets/sisense.py +++ b/pyatlan_v9/model/assets/sisense.py @@ -47,7 +47,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import 
RelatedSchemaRegistrySubject -from .sisense_related import RelatedSisense from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -91,6 +90,8 @@ class Sisense(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Sisense" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -186,66 +187,6 @@ class Sisense(Asset): def __post_init__(self) -> None: self.type_name = "Sisense" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Sisense instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Sisense validation failed: {errors}") - - def minimize(self) -> "Sisense": - """ - Return a minimal copy of this Sisense with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Sisense with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Sisense instance with only the minimum required fields. - """ - self.validate() - return Sisense(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisense": - """ - Create a :class:`RelatedSisense` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisense reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSisense(guid=self.guid) - return RelatedSisense(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,9 +434,6 @@ def _sisense_to_nested(sisense: Sisense) -> SisenseNested: is_incomplete=sisense.is_incomplete, provenance_type=sisense.provenance_type, home_id=sisense.home_id, - depth=sisense.depth, - immediate_upstream=sisense.immediate_upstream, - immediate_downstream=sisense.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -525,6 +463,7 @@ def _sisense_from_nested(nested: SisenseNested) -> Sisense: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -533,9 +472,6 @@ def _sisense_from_nested(nested: SisenseNested) -> Sisense: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, 
- immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sisense_dashboard.py b/pyatlan_v9/model/assets/sisense_dashboard.py index fe7dc4dae..e9cff6f0c 100644 --- a/pyatlan_v9/model/assets/sisense_dashboard.py +++ b/pyatlan_v9/model/assets/sisense_dashboard.py @@ -49,7 +49,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .sisense_related import ( - RelatedSisenseDashboard, RelatedSisenseDatamodel, RelatedSisenseFolder, RelatedSisenseWidget, @@ -69,7 +68,7 @@ class SisenseDashboard(Asset): """ SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[Any] = None - SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[Any] = None + SISENSE_WIDGET_COUNT: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -102,10 +101,12 @@ class SisenseDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseDashboard" + sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET @@ -218,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_datamodels is UNSET: - errors.append("sisense_datamodels is required for creation") - if errors: - raise ValueError(f"SisenseDashboard validation failed: {errors}") - - def minimize(self) -> "SisenseDashboard": - """ - Return a minimal copy of this SisenseDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDashboard instance with only the minimum required fields. - """ - self.validate() - return SisenseDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDashboard": - """ - Create a :class:`RelatedSisenseDashboard` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSisenseDashboard(guid=self.guid) - return RelatedSisenseDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -344,7 +279,7 @@ class SisenseDashboardAttributes(AssetAttributes): sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" @@ -516,7 +451,7 @@ def _populate_sisense_dashboard_attrs( attrs.sisense_dashboard_folder_qualified_name = ( obj.sisense_dashboard_folder_qualified_name ) - attrs.sisense_dashboard_widget_count = obj.sisense_dashboard_widget_count + attrs.sisense_widget_count = obj.sisense_widget_count def _extract_sisense_dashboard_attrs(attrs: SisenseDashboardAttributes) -> dict: @@ -525,7 +460,7 @@ def _extract_sisense_dashboard_attrs(attrs: SisenseDashboardAttributes) -> dict: result["sisense_dashboard_folder_qualified_name"] = ( attrs.sisense_dashboard_folder_qualified_name ) - result["sisense_dashboard_widget_count"] = attrs.sisense_dashboard_widget_count + result["sisense_widget_count"] = attrs.sisense_widget_count return result @@ -566,9 +501,6 @@ def _sisense_dashboard_to_nested( is_incomplete=sisense_dashboard.is_incomplete, provenance_type=sisense_dashboard.provenance_type, home_id=sisense_dashboard.home_id, - depth=sisense_dashboard.depth, - immediate_upstream=sisense_dashboard.immediate_upstream, - 
immediate_downstream=sisense_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -602,6 +534,7 @@ def _sisense_dashboard_from_nested(nested: SisenseDashboardNested) -> SisenseDas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -610,9 +543,6 @@ def _sisense_dashboard_from_nested(nested: SisenseDashboardNested) -> SisenseDas is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -646,8 +576,8 @@ def _sisense_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa "sisenseDashboardFolderQualifiedName", "sisenseDashboardFolderQualifiedName.text", ) -SisenseDashboard.SISENSE_DASHBOARD_WIDGET_COUNT = NumericField( - "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" +SisenseDashboard.SISENSE_WIDGET_COUNT = NumericField( + "sisenseWidgetCount", "sisenseWidgetCount" ) SisenseDashboard.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") SisenseDashboard.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") diff --git a/pyatlan_v9/model/assets/sisense_datamodel.py b/pyatlan_v9/model/assets/sisense_datamodel.py index 6e226e5f0..7632a4d81 100644 --- a/pyatlan_v9/model/assets/sisense_datamodel.py +++ b/pyatlan_v9/model/assets/sisense_datamodel.py @@ -47,11 +47,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sisense_related import 
( - RelatedSisenseDashboard, - RelatedSisenseDatamodel, - RelatedSisenseDatamodelTable, -) +from .sisense_related import RelatedSisenseDashboard, RelatedSisenseDatamodelTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -66,12 +62,12 @@ class SisenseDatamodel(Asset): Instance of a Sisense datamodel in Atlan. These group tables together that you can use to build dashboards. """ - SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[Any] = None + SISENSE_TABLE_COUNT: ClassVar[Any] = None SISENSE_DATAMODEL_SERVER: ClassVar[Any] = None - SISENSE_DATAMODEL_REVISION: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[Any] = None + SISENSE_REVISION: ClassVar[Any] = None + SISENSE_LAST_BUILD_TIME: ClassVar[Any] = None + SISENSE_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[Any] = None + SISENSE_LAST_PUBLISH_TIME: ClassVar[Any] = None SISENSE_DATAMODEL_TYPE: ClassVar[Any] = None SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None @@ -105,22 +101,24 @@ class SisenseDatamodel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SisenseDatamodel" + + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in 
milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -232,66 +230,6 @@ class SisenseDatamodel(Asset): def __post_init__(self) -> None: self.type_name = "SisenseDatamodel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDatamodel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SisenseDatamodel validation failed: {errors}") - - def minimize(self) -> "SisenseDatamodel": - """ - Return a minimal copy of this SisenseDatamodel with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDatamodel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDatamodel instance with only the minimum required fields. - """ - self.validate() - return SisenseDatamodel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDatamodel": - """ - Create a :class:`RelatedSisenseDatamodel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDatamodel reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSisenseDatamodel(guid=self.guid) - return RelatedSisenseDatamodel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -349,22 +287,22 @@ def from_json( class SisenseDatamodelAttributes(AssetAttributes): """SisenseDatamodel-specific attributes for nested API format.""" - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, 
None, UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -537,14 +475,12 @@ def _populate_sisense_datamodel_attrs( ) -> None: """Populate SisenseDatamodel-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sisense_datamodel_table_count = obj.sisense_datamodel_table_count + attrs.sisense_table_count = obj.sisense_table_count attrs.sisense_datamodel_server = obj.sisense_datamodel_server - attrs.sisense_datamodel_revision = obj.sisense_datamodel_revision - attrs.sisense_datamodel_last_build_time = obj.sisense_datamodel_last_build_time - attrs.sisense_datamodel_last_successful_build_time = ( - obj.sisense_datamodel_last_successful_build_time - ) - attrs.sisense_datamodel_last_publish_time = obj.sisense_datamodel_last_publish_time + attrs.sisense_revision = obj.sisense_revision + attrs.sisense_last_build_time = obj.sisense_last_build_time + attrs.sisense_last_successful_build_time = obj.sisense_last_successful_build_time + attrs.sisense_last_publish_time = obj.sisense_last_publish_time attrs.sisense_datamodel_type = obj.sisense_datamodel_type attrs.sisense_datamodel_relation_type = obj.sisense_datamodel_relation_type @@ -552,18 +488,14 @@ def _populate_sisense_datamodel_attrs( def _extract_sisense_datamodel_attrs(attrs: SisenseDatamodelAttributes) -> dict: """Extract all SisenseDatamodel attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sisense_datamodel_table_count"] = attrs.sisense_datamodel_table_count + result["sisense_table_count"] = attrs.sisense_table_count 
result["sisense_datamodel_server"] = attrs.sisense_datamodel_server - result["sisense_datamodel_revision"] = attrs.sisense_datamodel_revision - result["sisense_datamodel_last_build_time"] = ( - attrs.sisense_datamodel_last_build_time - ) - result["sisense_datamodel_last_successful_build_time"] = ( - attrs.sisense_datamodel_last_successful_build_time - ) - result["sisense_datamodel_last_publish_time"] = ( - attrs.sisense_datamodel_last_publish_time + result["sisense_revision"] = attrs.sisense_revision + result["sisense_last_build_time"] = attrs.sisense_last_build_time + result["sisense_last_successful_build_time"] = ( + attrs.sisense_last_successful_build_time ) + result["sisense_last_publish_time"] = attrs.sisense_last_publish_time result["sisense_datamodel_type"] = attrs.sisense_datamodel_type result["sisense_datamodel_relation_type"] = attrs.sisense_datamodel_relation_type return result @@ -606,9 +538,6 @@ def _sisense_datamodel_to_nested( is_incomplete=sisense_datamodel.is_incomplete, provenance_type=sisense_datamodel.provenance_type, home_id=sisense_datamodel.home_id, - depth=sisense_datamodel.depth, - immediate_upstream=sisense_datamodel.immediate_upstream, - immediate_downstream=sisense_datamodel.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -642,6 +571,7 @@ def _sisense_datamodel_from_nested(nested: SisenseDatamodelNested) -> SisenseDat updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -650,9 +580,6 @@ def _sisense_datamodel_from_nested(nested: SisenseDatamodelNested) -> SisenseDat is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_sisense_datamodel_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -681,23 +608,21 @@ def _sisense_datamodel_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa RelationField, ) -SisenseDatamodel.SISENSE_DATAMODEL_TABLE_COUNT = NumericField( - "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" +SisenseDatamodel.SISENSE_TABLE_COUNT = NumericField( + "sisenseTableCount", "sisenseTableCount" ) SisenseDatamodel.SISENSE_DATAMODEL_SERVER = KeywordField( "sisenseDatamodelServer", "sisenseDatamodelServer" ) -SisenseDatamodel.SISENSE_DATAMODEL_REVISION = KeywordField( - "sisenseDatamodelRevision", "sisenseDatamodelRevision" -) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_BUILD_TIME = NumericField( - "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" +SisenseDatamodel.SISENSE_REVISION = KeywordField("sisenseRevision", "sisenseRevision") +SisenseDatamodel.SISENSE_LAST_BUILD_TIME = NumericField( + "sisenseLastBuildTime", "sisenseLastBuildTime" ) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME = NumericField( - "sisenseDatamodelLastSuccessfulBuildTime", "sisenseDatamodelLastSuccessfulBuildTime" +SisenseDatamodel.SISENSE_LAST_SUCCESSFUL_BUILD_TIME = NumericField( + "sisenseLastSuccessfulBuildTime", "sisenseLastSuccessfulBuildTime" ) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_PUBLISH_TIME = NumericField( - "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" +SisenseDatamodel.SISENSE_LAST_PUBLISH_TIME = NumericField( + "sisenseLastPublishTime", "sisenseLastPublishTime" ) SisenseDatamodel.SISENSE_DATAMODEL_TYPE = KeywordField( "sisenseDatamodelType", "sisenseDatamodelType" diff --git a/pyatlan_v9/model/assets/sisense_datamodel_table.py b/pyatlan_v9/model/assets/sisense_datamodel_table.py index 4e8cff8fb..166b6034c 100644 --- a/pyatlan_v9/model/assets/sisense_datamodel_table.py +++ b/pyatlan_v9/model/assets/sisense_datamodel_table.py @@ -48,11 +48,7 @@ 
from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sisense_related import ( - RelatedSisenseDatamodel, - RelatedSisenseDatamodelTable, - RelatedSisenseWidget, -) +from .sisense_related import RelatedSisenseDatamodel, RelatedSisenseWidget from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -68,13 +64,13 @@ class SisenseDatamodelTable(Asset): """ SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[Any] = None + SISENSE_COLUMN_COUNT: ClassVar[Any] = None + SISENSE_TYPE: ClassVar[Any] = None SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[Any] = None + SISENSE_IS_MATERIALIZED: ClassVar[Any] = None + SISENSE_IS_HIDDEN: ClassVar[Any] = None + SISENSE_SCHEDULE: ClassVar[Any] = None + SISENSE_LIVE_QUERY_SETTINGS: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -106,28 +102,30 @@ class SisenseDatamodelTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseDatamodelTable" + sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, 
UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET @@ -237,76 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDatamodelTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_datamodel is UNSET: - errors.append("sisense_datamodel is required for creation") - if self.sisense_datamodel_qualified_name is UNSET: - errors.append( - "sisense_datamodel_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SisenseDatamodelTable validation failed: {errors}") - - def minimize(self) -> "SisenseDatamodelTable": - """ - Return a minimal copy of this SisenseDatamodelTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDatamodelTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDatamodelTable instance with only the minimum required fields. - """ - self.validate() - return SisenseDatamodelTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDatamodelTable": - """ - Create a :class:`RelatedSisenseDatamodelTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDatamodelTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseDatamodelTable(guid=self.guid) - return RelatedSisenseDatamodelTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,25 +295,25 @@ class SisenseDatamodelTableAttributes(AssetAttributes): sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" @@ 
-551,19 +479,13 @@ def _populate_sisense_datamodel_table_attrs( """Populate SisenseDatamodelTable-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.sisense_datamodel_qualified_name = obj.sisense_datamodel_qualified_name - attrs.sisense_datamodel_table_column_count = ( - obj.sisense_datamodel_table_column_count - ) - attrs.sisense_datamodel_table_type = obj.sisense_datamodel_table_type + attrs.sisense_column_count = obj.sisense_column_count + attrs.sisense_type = obj.sisense_type attrs.sisense_datamodel_table_expression = obj.sisense_datamodel_table_expression - attrs.sisense_datamodel_table_is_materialized = ( - obj.sisense_datamodel_table_is_materialized - ) - attrs.sisense_datamodel_table_is_hidden = obj.sisense_datamodel_table_is_hidden - attrs.sisense_datamodel_table_schedule = obj.sisense_datamodel_table_schedule - attrs.sisense_datamodel_table_live_query_settings = ( - obj.sisense_datamodel_table_live_query_settings - ) + attrs.sisense_is_materialized = obj.sisense_is_materialized + attrs.sisense_is_hidden = obj.sisense_is_hidden + attrs.sisense_schedule = obj.sisense_schedule + attrs.sisense_live_query_settings = obj.sisense_live_query_settings def _extract_sisense_datamodel_table_attrs( @@ -572,23 +494,15 @@ def _extract_sisense_datamodel_table_attrs( """Extract all SisenseDatamodelTable attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["sisense_datamodel_qualified_name"] = attrs.sisense_datamodel_qualified_name - result["sisense_datamodel_table_column_count"] = ( - attrs.sisense_datamodel_table_column_count - ) - result["sisense_datamodel_table_type"] = attrs.sisense_datamodel_table_type + result["sisense_column_count"] = attrs.sisense_column_count + result["sisense_type"] = attrs.sisense_type result["sisense_datamodel_table_expression"] = ( attrs.sisense_datamodel_table_expression ) - result["sisense_datamodel_table_is_materialized"] = ( - 
attrs.sisense_datamodel_table_is_materialized - ) - result["sisense_datamodel_table_is_hidden"] = ( - attrs.sisense_datamodel_table_is_hidden - ) - result["sisense_datamodel_table_schedule"] = attrs.sisense_datamodel_table_schedule - result["sisense_datamodel_table_live_query_settings"] = ( - attrs.sisense_datamodel_table_live_query_settings - ) + result["sisense_is_materialized"] = attrs.sisense_is_materialized + result["sisense_is_hidden"] = attrs.sisense_is_hidden + result["sisense_schedule"] = attrs.sisense_schedule + result["sisense_live_query_settings"] = attrs.sisense_live_query_settings return result @@ -629,9 +543,6 @@ def _sisense_datamodel_table_to_nested( is_incomplete=sisense_datamodel_table.is_incomplete, provenance_type=sisense_datamodel_table.provenance_type, home_id=sisense_datamodel_table.home_id, - depth=sisense_datamodel_table.depth, - immediate_upstream=sisense_datamodel_table.immediate_upstream, - immediate_downstream=sisense_datamodel_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -667,6 +578,7 @@ def _sisense_datamodel_table_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -675,9 +587,6 @@ def _sisense_datamodel_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_datamodel_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -715,26 +624,24 @@ def _sisense_datamodel_table_from_nested_bytes( "sisenseDatamodelQualifiedName", "sisenseDatamodelQualifiedName.text", ) 
-SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_COLUMN_COUNT = NumericField( - "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" -) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_TYPE = KeywordField( - "sisenseDatamodelTableType", "sisenseDatamodelTableType" +SisenseDatamodelTable.SISENSE_COLUMN_COUNT = NumericField( + "sisenseColumnCount", "sisenseColumnCount" ) +SisenseDatamodelTable.SISENSE_TYPE = KeywordField("sisenseType", "sisenseType") SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_EXPRESSION = KeywordField( "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED = BooleanField( - "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" +SisenseDatamodelTable.SISENSE_IS_MATERIALIZED = BooleanField( + "sisenseIsMaterialized", "sisenseIsMaterialized" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_IS_HIDDEN = BooleanField( - "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" +SisenseDatamodelTable.SISENSE_IS_HIDDEN = BooleanField( + "sisenseIsHidden", "sisenseIsHidden" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_SCHEDULE = KeywordField( - "sisenseDatamodelTableSchedule", "sisenseDatamodelTableSchedule" +SisenseDatamodelTable.SISENSE_SCHEDULE = KeywordField( + "sisenseSchedule", "sisenseSchedule" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS = KeywordField( - "sisenseDatamodelTableLiveQuerySettings", "sisenseDatamodelTableLiveQuerySettings" +SisenseDatamodelTable.SISENSE_LIVE_QUERY_SETTINGS = KeywordField( + "sisenseLiveQuerySettings", "sisenseLiveQuerySettings" ) SisenseDatamodelTable.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") SisenseDatamodelTable.OUTPUT_FROM_AIRFLOW_TASKS = RelationField( diff --git a/pyatlan_v9/model/assets/sisense_folder.py b/pyatlan_v9/model/assets/sisense_folder.py index 7c85c7f49..8360acb81 100644 --- a/pyatlan_v9/model/assets/sisense_folder.py +++ 
b/pyatlan_v9/model/assets/sisense_folder.py @@ -101,6 +101,8 @@ class SisenseFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseFolder" + sisense_folder_parent_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent folder in which this folder exists.""" @@ -217,70 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"SisenseFolder validation failed: {errors}") - - def minimize(self) -> "SisenseFolder": - """ - Return a minimal copy of this SisenseFolder with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseFolder instance with only the minimum required fields. - """ - self.validate() - return SisenseFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseFolder": - """ - Create a :class:`RelatedSisenseFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSisenseFolder(guid=self.guid) - return RelatedSisenseFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -556,9 +494,6 @@ def _sisense_folder_to_nested(sisense_folder: SisenseFolder) -> SisenseFolderNes is_incomplete=sisense_folder.is_incomplete, provenance_type=sisense_folder.provenance_type, home_id=sisense_folder.home_id, - depth=sisense_folder.depth, - immediate_upstream=sisense_folder.immediate_upstream, - immediate_downstream=sisense_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +527,7 @@ def _sisense_folder_from_nested(nested: SisenseFolderNested) -> SisenseFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +536,6 @@ def _sisense_folder_from_nested(nested: 
SisenseFolderNested) -> SisenseFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_folder_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sisense_related.py b/pyatlan_v9/model/assets/sisense_related.py index aa86c502b..7c64a5103 100644 --- a/pyatlan_v9/model/assets/sisense_related.py +++ b/pyatlan_v9/model/assets/sisense_related.py @@ -56,7 +56,7 @@ class RelatedSisenseDashboard(RelatedSisense): sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" def __post_init__(self) -> None: @@ -74,22 +74,22 @@ class RelatedSisenseDatamodel(RelatedSisense): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SisenseDatamodel" so it serializes correctly - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] 
= UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -116,25 +116,25 @@ class RelatedSisenseDatamodelTable(RelatedSisense): sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" def 
__post_init__(self) -> None: @@ -170,13 +170,13 @@ class RelatedSisenseWidget(RelatedSisense): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SisenseWidget" so it serializes correctly - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET diff --git a/pyatlan_v9/model/assets/sisense_widget.py b/pyatlan_v9/model/assets/sisense_widget.py index 769a6ad59..5a93b0dd4 100644 --- a/pyatlan_v9/model/assets/sisense_widget.py +++ b/pyatlan_v9/model/assets/sisense_widget.py @@ -52,7 +52,6 @@ RelatedSisenseDashboard, RelatedSisenseDatamodelTable, RelatedSisenseFolder, - RelatedSisenseWidget, ) from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -68,9 +67,9 @@ class SisenseWidget(Asset): Instance of a Sisense widget in Atlan. 
""" - SISENSE_WIDGET_COLUMN_COUNT: ClassVar[Any] = None - SISENSE_WIDGET_SUB_TYPE: ClassVar[Any] = None - SISENSE_WIDGET_SIZE: ClassVar[Any] = None + SISENSE_COLUMN_COUNT: ClassVar[Any] = None + SISENSE_SUB_TYPE: ClassVar[Any] = None + SISENSE_SIZE: ClassVar[Any] = None SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[Any] = None SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None @@ -105,13 +104,15 @@ class SisenseWidget(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SisenseWidget" + + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET @@ -234,72 +235,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseWidget instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_dashboard is UNSET: - errors.append("sisense_dashboard is required for creation") - if errors: - raise ValueError(f"SisenseWidget validation failed: {errors}") - - def minimize(self) -> "SisenseWidget": - """ - Return a minimal copy of this SisenseWidget with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseWidget with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseWidget instance with only the minimum required fields. - """ - self.validate() - return SisenseWidget(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseWidget": - """ - Create a :class:`RelatedSisenseWidget` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseWidget reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseWidget(guid=self.guid) - return RelatedSisenseWidget(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -355,13 +290,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SisenseWidg class SisenseWidgetAttributes(AssetAttributes): """SisenseWidget-specific attributes for nested API format.""" - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET @@ -538,9 +473,9 @@ def _populate_sisense_widget_attrs( ) -> None: """Populate SisenseWidget-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sisense_widget_column_count = obj.sisense_widget_column_count - attrs.sisense_widget_sub_type = obj.sisense_widget_sub_type - attrs.sisense_widget_size = obj.sisense_widget_size + attrs.sisense_column_count = obj.sisense_column_count + attrs.sisense_sub_type = obj.sisense_sub_type + attrs.sisense_size = obj.sisense_size attrs.sisense_widget_dashboard_qualified_name = ( obj.sisense_widget_dashboard_qualified_name ) @@ -552,9 +487,9 @@ def _populate_sisense_widget_attrs( def _extract_sisense_widget_attrs(attrs: SisenseWidgetAttributes) -> dict: """Extract all SisenseWidget attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - 
result["sisense_widget_column_count"] = attrs.sisense_widget_column_count - result["sisense_widget_sub_type"] = attrs.sisense_widget_sub_type - result["sisense_widget_size"] = attrs.sisense_widget_size + result["sisense_column_count"] = attrs.sisense_column_count + result["sisense_sub_type"] = attrs.sisense_sub_type + result["sisense_size"] = attrs.sisense_size result["sisense_widget_dashboard_qualified_name"] = ( attrs.sisense_widget_dashboard_qualified_name ) @@ -597,9 +532,6 @@ def _sisense_widget_to_nested(sisense_widget: SisenseWidget) -> SisenseWidgetNes is_incomplete=sisense_widget.is_incomplete, provenance_type=sisense_widget.provenance_type, home_id=sisense_widget.home_id, - depth=sisense_widget.depth, - immediate_upstream=sisense_widget.immediate_upstream, - immediate_downstream=sisense_widget.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -633,6 +565,7 @@ def _sisense_widget_from_nested(nested: SisenseWidgetNested) -> SisenseWidget: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -641,9 +574,6 @@ def _sisense_widget_from_nested(nested: SisenseWidgetNested) -> SisenseWidget: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_widget_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -673,15 +603,11 @@ def _sisense_widget_from_nested_bytes(data: bytes, serde: Serde) -> SisenseWidge RelationField, ) -SisenseWidget.SISENSE_WIDGET_COLUMN_COUNT = NumericField( - "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" -) -SisenseWidget.SISENSE_WIDGET_SUB_TYPE = 
KeywordField( - "sisenseWidgetSubType", "sisenseWidgetSubType" -) -SisenseWidget.SISENSE_WIDGET_SIZE = KeywordField( - "sisenseWidgetSize", "sisenseWidgetSize" +SisenseWidget.SISENSE_COLUMN_COUNT = NumericField( + "sisenseColumnCount", "sisenseColumnCount" ) +SisenseWidget.SISENSE_SUB_TYPE = KeywordField("sisenseSubType", "sisenseSubType") +SisenseWidget.SISENSE_SIZE = KeywordField("sisenseSize", "sisenseSize") SisenseWidget.SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME = KeywordTextField( "sisenseWidgetDashboardQualifiedName", "sisenseWidgetDashboardQualifiedName", diff --git a/pyatlan_v9/model/assets/snowflake.py b/pyatlan_v9/model/assets/snowflake.py index 9c902b6f8..1aed74634 100644 --- a/pyatlan_v9/model/assets/snowflake.py +++ b/pyatlan_v9/model/assets/snowflake.py @@ -54,7 +54,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .snowflake_related import RelatedSnowflake, RelatedSnowflakeSemanticLogicalTable +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -123,6 +123,8 @@ class Snowflake(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Snowflake" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -299,66 +301,6 @@ class Snowflake(Asset): def __post_init__(self) -> None: self.type_name = "Snowflake" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Snowflake instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Snowflake validation failed: {errors}") - - def minimize(self) -> "Snowflake": - """ - Return a minimal copy of this Snowflake with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Snowflake with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Snowflake instance with only the minimum required fields. - """ - self.validate() - return Snowflake(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflake": - """ - Create a :class:`RelatedSnowflake` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflake reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSnowflake(guid=self.guid) - return RelatedSnowflake(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -731,9 +673,6 @@ def _snowflake_to_nested(snowflake: Snowflake) -> SnowflakeNested: is_incomplete=snowflake.is_incomplete, provenance_type=snowflake.provenance_type, home_id=snowflake.home_id, - depth=snowflake.depth, - immediate_upstream=snowflake.immediate_upstream, - immediate_downstream=snowflake.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -765,6 +704,7 @@ def _snowflake_from_nested(nested: SnowflakeNested) -> Snowflake: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -773,9 +713,6 @@ def _snowflake_from_nested(nested: SnowflakeNested) -> Snowflake: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_ai_model_context.py b/pyatlan_v9/model/assets/snowflake_ai_model_context.py index cdf8cc3a0..f3670894e 100644 --- a/pyatlan_v9/model/assets/snowflake_ai_model_context.py +++ b/pyatlan_v9/model/assets/snowflake_ai_model_context.py @@ -57,7 +57,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .snowflake_related import ( - 
RelatedSnowflakeAIModelContext, RelatedSnowflakeAIModelVersion, RelatedSnowflakeSemanticLogicalTable, ) @@ -144,6 +143,8 @@ class SnowflakeAIModelContext(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeAIModelContext" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -390,80 +391,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeAIModelContext instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeAIModelContext validation failed: {errors}") - - def minimize(self) -> "SnowflakeAIModelContext": - """ - Return a minimal copy of this SnowflakeAIModelContext with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeAIModelContext with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeAIModelContext instance with only the minimum required fields. - """ - self.validate() - return SnowflakeAIModelContext( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeAIModelContext": - """ - Create a :class:`RelatedSnowflakeAIModelContext` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedSnowflakeAIModelContext reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeAIModelContext(guid=self.guid) - return RelatedSnowflakeAIModelContext(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -944,9 +871,6 @@ def _snowflake_ai_model_context_to_nested( is_incomplete=snowflake_ai_model_context.is_incomplete, provenance_type=snowflake_ai_model_context.provenance_type, home_id=snowflake_ai_model_context.home_id, - depth=snowflake_ai_model_context.depth, - immediate_upstream=snowflake_ai_model_context.immediate_upstream, - immediate_downstream=snowflake_ai_model_context.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -982,6 +906,7 @@ def _snowflake_ai_model_context_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -990,9 +915,6 @@ def _snowflake_ai_model_context_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_ai_model_context_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_ai_model_version.py b/pyatlan_v9/model/assets/snowflake_ai_model_version.py index 4a1e83bca..164a64a19 100644 --- a/pyatlan_v9/model/assets/snowflake_ai_model_version.py +++ b/pyatlan_v9/model/assets/snowflake_ai_model_version.py @@ -58,7 +58,6 @@ from .schema_registry_related 
import RelatedSchemaRegistrySubject from .snowflake_related import ( RelatedSnowflakeAIModelContext, - RelatedSnowflakeAIModelVersion, RelatedSnowflakeSemanticLogicalTable, ) from .soda_related import RelatedSodaCheck @@ -75,11 +74,11 @@ class SnowflakeAIModelVersion(Asset): Instance of an ai model version in snowflake. """ - SNOWFLAKE_AI_MODEL_VERSION_NAME: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_TYPE: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_ALIASES: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_METRICS: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_FUNCTIONS: ClassVar[Any] = None + SNOWFLAKE_NAME: ClassVar[Any] = None + SNOWFLAKE_TYPE: ClassVar[Any] = None + SNOWFLAKE_ALIASES: ClassVar[Any] = None + SNOWFLAKE_METRICS: ClassVar[Any] = None + SNOWFLAKE_FUNCTIONS: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -143,29 +142,21 @@ class SnowflakeAIModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + type_name: Union[str, UnsetType] = "SnowflakeAIModelVersion" + + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, 
name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the model version.""" query_count: Union[int, None, UnsetType] = UNSET @@ -395,82 +386,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeAIModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_ai_model_context is UNSET: - errors.append("snowflake_ai_model_context is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeAIModelVersion validation failed: {errors}") - - def minimize(self) -> "SnowflakeAIModelVersion": - """ - Return a minimal copy of this SnowflakeAIModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeAIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeAIModelVersion instance with only the minimum required fields. - """ - self.validate() - return SnowflakeAIModelVersion( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeAIModelVersion": - """ - Create a :class:`RelatedSnowflakeAIModelVersion` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeAIModelVersion reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeAIModelVersion(guid=self.guid) - return RelatedSnowflakeAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -528,29 +443,19 @@ def from_json( class SnowflakeAIModelVersionAttributes(AssetAttributes): """SnowflakeAIModelVersion-specific attributes for nested API format.""" - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the 
model version.""" query_count: Union[int, None, UnsetType] = UNSET @@ -840,13 +745,11 @@ def _populate_snowflake_ai_model_version_attrs( ) -> None: """Populate SnowflakeAIModelVersion-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.snowflake_ai_model_version_name = obj.snowflake_ai_model_version_name - attrs.snowflake_ai_model_version_type = obj.snowflake_ai_model_version_type - attrs.snowflake_ai_model_version_aliases = obj.snowflake_ai_model_version_aliases - attrs.snowflake_ai_model_version_metrics = obj.snowflake_ai_model_version_metrics - attrs.snowflake_ai_model_version_functions = ( - obj.snowflake_ai_model_version_functions - ) + attrs.snowflake_name = obj.snowflake_name + attrs.snowflake_type = obj.snowflake_type + attrs.snowflake_aliases = obj.snowflake_aliases + attrs.snowflake_metrics = obj.snowflake_metrics + attrs.snowflake_functions = obj.snowflake_functions attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -883,17 +786,11 @@ def _extract_snowflake_ai_model_version_attrs( ) -> dict: """Extract all SnowflakeAIModelVersion attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["snowflake_ai_model_version_name"] = attrs.snowflake_ai_model_version_name - result["snowflake_ai_model_version_type"] = attrs.snowflake_ai_model_version_type - result["snowflake_ai_model_version_aliases"] = ( - attrs.snowflake_ai_model_version_aliases - ) - result["snowflake_ai_model_version_metrics"] = ( - attrs.snowflake_ai_model_version_metrics - ) - result["snowflake_ai_model_version_functions"] = ( - attrs.snowflake_ai_model_version_functions - ) + result["snowflake_name"] = attrs.snowflake_name + result["snowflake_type"] = attrs.snowflake_type + result["snowflake_aliases"] = attrs.snowflake_aliases + result["snowflake_metrics"] = attrs.snowflake_metrics + result["snowflake_functions"] = attrs.snowflake_functions 
result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -967,9 +864,6 @@ def _snowflake_ai_model_version_to_nested( is_incomplete=snowflake_ai_model_version.is_incomplete, provenance_type=snowflake_ai_model_version.provenance_type, home_id=snowflake_ai_model_version.home_id, - depth=snowflake_ai_model_version.depth, - immediate_upstream=snowflake_ai_model_version.immediate_upstream, - immediate_downstream=snowflake_ai_model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1005,6 +899,7 @@ def _snowflake_ai_model_version_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1013,9 +908,6 @@ def _snowflake_ai_model_version_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1049,20 +941,16 @@ def _snowflake_ai_model_version_from_nested_bytes( RelationField, ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_NAME = KeywordField( - "snowflakeAIModelVersionName", "snowflakeAIModelVersionName" -) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_TYPE = KeywordField( - "snowflakeAIModelVersionType", "snowflakeAIModelVersionType" -) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_ALIASES = KeywordField( - "snowflakeAIModelVersionAliases", "snowflakeAIModelVersionAliases" +SnowflakeAIModelVersion.SNOWFLAKE_NAME = KeywordField("snowflakeName", "snowflakeName") 
+SnowflakeAIModelVersion.SNOWFLAKE_TYPE = KeywordField("snowflakeType", "snowflakeType") +SnowflakeAIModelVersion.SNOWFLAKE_ALIASES = KeywordField( + "snowflakeAliases", "snowflakeAliases" ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_METRICS = KeywordField( - "snowflakeAIModelVersionMetrics", "snowflakeAIModelVersionMetrics" +SnowflakeAIModelVersion.SNOWFLAKE_METRICS = KeywordField( + "snowflakeMetrics", "snowflakeMetrics" ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_FUNCTIONS = KeywordField( - "snowflakeAIModelVersionFunctions", "snowflakeAIModelVersionFunctions" +SnowflakeAIModelVersion.SNOWFLAKE_FUNCTIONS = KeywordField( + "snowflakeFunctions", "snowflakeFunctions" ) SnowflakeAIModelVersion.QUERY_COUNT = NumericField("queryCount", "queryCount") SnowflakeAIModelVersion.QUERY_USER_COUNT = NumericField( diff --git a/pyatlan_v9/model/assets/snowflake_related.py b/pyatlan_v9/model/assets/snowflake_related.py index 7705f0ac7..a7a72aa2a 100644 --- a/pyatlan_v9/model/assets/snowflake_related.py +++ b/pyatlan_v9/model/assets/snowflake_related.py @@ -13,7 +13,6 @@ from typing import Dict, List, Union -import msgspec from msgspec import UNSET, UnsetType from .referenceable_related import RelatedReferenceable @@ -82,7 +81,7 @@ class RelatedSnowflakePipe(RelatedSnowflake): definition: Union[str, None, UnsetType] = UNSET """SQL definition of this pipe.""" - snowflake_pipe_is_auto_ingest_enabled: Union[bool, None, UnsetType] = UNSET + snowflake_is_auto_ingest_enabled: Union[bool, None, UnsetType] = UNSET """Whether auto-ingest is enabled for this pipe (true) or not (false).""" snowflake_pipe_notification_channel_name: Union[str, None, UnsetType] = UNSET @@ -103,16 +102,16 @@ class RelatedSnowflakeStage(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeStage" so it serializes correctly - snowflake_stage_external_location: Union[str, None, UnsetType] = UNSET + snowflake_external_location: Union[str, 
None, UnsetType] = UNSET """The URL or cloud storage path specifying the external location where the stage data files are stored. This is NULL for internal stages.""" - snowflake_stage_external_location_region: Union[str, None, UnsetType] = UNSET + snowflake_external_location_region: Union[str, None, UnsetType] = UNSET """The geographic region identifier where the external stage is located in cloud storage. This is NULL for internal stages.""" - snowflake_stage_storage_integration: Union[str, None, UnsetType] = UNSET + snowflake_storage_integration: Union[str, None, UnsetType] = UNSET """The name of the storage integration associated with the stage; NULL for internal stages or stages that do not use a storage integration.""" - snowflake_stage_type: Union[str, None, UnsetType] = UNSET + snowflake_type: Union[str, None, UnsetType] = UNSET """Categorization of the stage type in Snowflake, which can be 'Internal Named' or 'External Named', indicating whether the stage storage is within Snowflake or in external cloud storage.""" def __post_init__(self) -> None: @@ -130,19 +129,19 @@ class RelatedSnowflakeStream(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeStream" so it serializes correctly - snowflake_stream_type: Union[str, None, UnsetType] = UNSET + snowflake_type: Union[str, None, UnsetType] = UNSET """Type of this stream, for example: standard, append-only, insert-only, etc.""" - snowflake_stream_source_type: Union[str, None, UnsetType] = UNSET + snowflake_source_type: Union[str, None, UnsetType] = UNSET """Type of the source of this stream.""" - snowflake_stream_mode: Union[str, None, UnsetType] = UNSET + snowflake_mode: Union[str, None, UnsetType] = UNSET """Mode of this stream.""" - snowflake_stream_is_stale: Union[bool, None, UnsetType] = UNSET + snowflake_is_stale: Union[bool, None, UnsetType] = UNSET """Whether this stream is stale (true) or not (false).""" - snowflake_stream_stale_after: 
Union[int, None, UnsetType] = UNSET + snowflake_stale_after: Union[int, None, UnsetType] = UNSET """Time (epoch) after which this stream will be stale, in milliseconds.""" def __post_init__(self) -> None: @@ -190,29 +189,19 @@ class RelatedSnowflakeAIModelVersion(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeAIModelVersion" so it serializes correctly - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the model version.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/snowflake_semantic_dimension.py b/pyatlan_v9/model/assets/snowflake_semantic_dimension.py index 9207dd994..78d8c5265 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_dimension.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_dimension.py @@ -56,10 +56,7 @@ from 
.resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .semantic_related import RelatedSemanticModel -from .snowflake_related import ( - RelatedSnowflakeSemanticDimension, - RelatedSnowflakeSemanticLogicalTable, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -141,6 +138,8 @@ class SnowflakeSemanticDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticDimension" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this dimension exists.""" @@ -366,90 +365,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticDimension validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticDimension": - """ - Return a minimal copy of this SnowflakeSemanticDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticDimension instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeSemanticDimension( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticDimension": - """ - Create a :class:`RelatedSnowflakeSemanticDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticDimension reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticDimension(guid=self.guid) - return RelatedSnowflakeSemanticDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -907,9 +822,6 @@ def _snowflake_semantic_dimension_to_nested( is_incomplete=snowflake_semantic_dimension.is_incomplete, provenance_type=snowflake_semantic_dimension.provenance_type, home_id=snowflake_semantic_dimension.home_id, - depth=snowflake_semantic_dimension.depth, - immediate_upstream=snowflake_semantic_dimension.immediate_upstream, - immediate_downstream=snowflake_semantic_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -945,6 +857,7 @@ def _snowflake_semantic_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -953,9 +866,6 @@ def _snowflake_semantic_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_semantic_fact.py b/pyatlan_v9/model/assets/snowflake_semantic_fact.py index 99cb96cbd..1451182f1 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_fact.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_fact.py @@ -56,10 +56,7 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .semantic_related import RelatedSemanticModel -from .snowflake_related import ( - RelatedSnowflakeSemanticFact, - RelatedSnowflakeSemanticLogicalTable, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -141,6 +138,8 @@ class SnowflakeSemanticFact(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticFact" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this fact exists.""" @@ -366,88 +365,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticFact instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticFact validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticFact": - """ - Return a minimal copy of this SnowflakeSemanticFact with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticFact with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticFact instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeSemanticFact(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflakeSemanticFact": - """ - Create a :class:`RelatedSnowflakeSemanticFact` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticFact reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticFact(guid=self.guid) - return RelatedSnowflakeSemanticFact(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -905,9 +822,6 @@ def _snowflake_semantic_fact_to_nested( is_incomplete=snowflake_semantic_fact.is_incomplete, provenance_type=snowflake_semantic_fact.provenance_type, home_id=snowflake_semantic_fact.home_id, - depth=snowflake_semantic_fact.depth, - immediate_upstream=snowflake_semantic_fact.immediate_upstream, - immediate_downstream=snowflake_semantic_fact.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -943,6 +857,7 @@ def _snowflake_semantic_fact_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -951,9 +866,6 @@ def _snowflake_semantic_fact_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_fact_attrs(attrs), # 
Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py b/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py index e71d64480..dc7b056ae 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py @@ -151,6 +151,8 @@ class SnowflakeSemanticLogicalTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticLogicalTable" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this logical table exists.""" @@ -402,90 +404,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticLogicalTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_view is UNSET: - errors.append("snowflake_semantic_view is required for creation") - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError( - f"SnowflakeSemanticLogicalTable validation failed: {errors}" - ) - - def minimize(self) -> "SnowflakeSemanticLogicalTable": - """ - Return a minimal copy of this SnowflakeSemanticLogicalTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticLogicalTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticLogicalTable instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeSemanticLogicalTable( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticLogicalTable": - """ - Create a :class:`RelatedSnowflakeSemanticLogicalTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticLogicalTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticLogicalTable(guid=self.guid) - return RelatedSnowflakeSemanticLogicalTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -986,9 +904,6 @@ def _snowflake_semantic_logical_table_to_nested( is_incomplete=snowflake_semantic_logical_table.is_incomplete, provenance_type=snowflake_semantic_logical_table.provenance_type, home_id=snowflake_semantic_logical_table.home_id, - depth=snowflake_semantic_logical_table.depth, - immediate_upstream=snowflake_semantic_logical_table.immediate_upstream, - immediate_downstream=snowflake_semantic_logical_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1024,6 +939,7 @@ def _snowflake_semantic_logical_table_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1032,9 +948,6 @@ def _snowflake_semantic_logical_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_logical_table_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_semantic_metric.py b/pyatlan_v9/model/assets/snowflake_semantic_metric.py index e9bdb9e80..99a20b09a 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_metric.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_metric.py @@ -56,10 +56,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .snowflake_related import ( - RelatedSnowflakeSemanticLogicalTable, - RelatedSnowflakeSemanticMetric, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_related import RelatedColumn @@ -145,6 +142,8 @@ class SnowflakeSemanticMetric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticMetric" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this metric exists.""" @@ -381,90 +380,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticMetric validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticMetric": - """ - Return a minimal copy of this SnowflakeSemanticMetric with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticMetric instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticMetric( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticMetric": - """ - Create a :class:`RelatedSnowflakeSemanticMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticMetric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticMetric(guid=self.guid) - return RelatedSnowflakeSemanticMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -949,9 +864,6 @@ def _snowflake_semantic_metric_to_nested( is_incomplete=snowflake_semantic_metric.is_incomplete, provenance_type=snowflake_semantic_metric.provenance_type, home_id=snowflake_semantic_metric.home_id, - depth=snowflake_semantic_metric.depth, - immediate_upstream=snowflake_semantic_metric.immediate_upstream, - immediate_downstream=snowflake_semantic_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -987,6 +899,7 @@ def _snowflake_semantic_metric_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -995,9 +908,6 @@ def _snowflake_semantic_metric_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_metric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/snowflake_semantic_view.py b/pyatlan_v9/model/assets/snowflake_semantic_view.py index e41fd7300..7412055af 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_view.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_view.py @@ -60,10 +60,7 @@ RelatedSemanticEntity, RelatedSemanticMeasure, ) -from .snowflake_related import ( - RelatedSnowflakeSemanticLogicalTable, - RelatedSnowflakeSemanticView, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_related import RelatedSchema @@ -138,6 +135,8 @@ class SnowflakeSemanticView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticView" + snowflake_definition: Union[str, None, UnsetType] = UNSET """DDL definition of the semantic view (via GET_DDL).""" @@ -337,78 +336,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticView validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticView": - """ - Return a minimal copy of this SnowflakeSemanticView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticView instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflakeSemanticView": - """ - Create a :class:`RelatedSnowflakeSemanticView` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticView(guid=self.guid) - return RelatedSnowflakeSemanticView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -814,9 +741,6 @@ def _snowflake_semantic_view_to_nested( is_incomplete=snowflake_semantic_view.is_incomplete, provenance_type=snowflake_semantic_view.provenance_type, home_id=snowflake_semantic_view.home_id, - depth=snowflake_semantic_view.depth, - immediate_upstream=snowflake_semantic_view.immediate_upstream, - immediate_downstream=snowflake_semantic_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -852,6 +776,7 @@ def _snowflake_semantic_view_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -860,9 +785,6 @@ def _snowflake_semantic_view_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/soda.py b/pyatlan_v9/model/assets/soda.py index b4d086949..629e9631d 100644 --- a/pyatlan_v9/model/assets/soda.py +++ b/pyatlan_v9/model/assets/soda.py @@ -47,7 +47,7 @@ 
from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .soda_related import RelatedSoda, RelatedSodaCheck +from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob # ============================================================================= @@ -91,6 +91,8 @@ class Soda(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Soda" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -189,66 +191,6 @@ class Soda(Asset): def __post_init__(self) -> None: self.type_name = "Soda" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Soda instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Soda validation failed: {errors}") - - def minimize(self) -> "Soda": - """ - Return a minimal copy of this Soda with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Soda with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Soda instance with only the minimum required fields. - """ - self.validate() - return Soda(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSoda": - """ - Create a :class:`RelatedSoda` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSoda reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSoda(guid=self.guid) - return RelatedSoda(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -496,9 +438,6 @@ def _soda_to_nested(soda: Soda) -> SodaNested: is_incomplete=soda.is_incomplete, provenance_type=soda.provenance_type, home_id=soda.home_id, - depth=soda.depth, - immediate_upstream=soda.immediate_upstream, - immediate_downstream=soda.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -528,6 +467,7 @@ def _soda_from_nested(nested: SodaNested) -> Soda: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -536,9 +476,6 @@ def _soda_from_nested(nested: SodaNested) -> Soda: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_soda_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/soda_check.py b/pyatlan_v9/model/assets/soda_check.py index 64656d0fc..917689319 100644 --- a/pyatlan_v9/model/assets/soda_check.py +++ b/pyatlan_v9/model/assets/soda_check.py @@ -63,12 +63,12 @@ class SodaCheck(Asset): Instance of a Soda check in Atlan. 
""" - SODA_CHECK_ID: ClassVar[Any] = None - SODA_CHECK_EVALUATION_STATUS: ClassVar[Any] = None + SODA_ID: ClassVar[Any] = None + SODA_EVALUATION_STATUS: ClassVar[Any] = None SODA_CHECK_DEFINITION: ClassVar[Any] = None - SODA_CHECK_LAST_SCAN_AT: ClassVar[Any] = None - SODA_CHECK_INCIDENT_COUNT: ClassVar[Any] = None - SODA_CHECK_LINKED_ASSET_QUALIFIED_NAME: ClassVar[Any] = None + SODA_LAST_SCAN_AT: ClassVar[Any] = None + SODA_INCIDENT_COUNT: ClassVar[Any] = None + SODA_LINKED_ASSET_QUALIFIED_NAME: ClassVar[Any] = None DQ_IS_PART_OF_CONTRACT: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -101,22 +101,24 @@ class SodaCheck(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - soda_check_id: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SodaCheck" + + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check.""" dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET @@ -223,66 +225,6 @@ class SodaCheck(Asset): def __post_init__(self) -> None: self.type_name = "SodaCheck" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SodaCheck instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SodaCheck validation failed: {errors}") - - def minimize(self) -> "SodaCheck": - """ - Return a minimal copy of this SodaCheck with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SodaCheck with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SodaCheck instance with only the minimum required fields. - """ - self.validate() - return SodaCheck(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSodaCheck": - """ - Create a :class:`RelatedSodaCheck` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSodaCheck reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSodaCheck(guid=self.guid) - return RelatedSodaCheck(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -338,22 +280,22 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SodaCheck: class SodaCheckAttributes(AssetAttributes): """SodaCheck-specific attributes for nested API format.""" - soda_check_id: Union[str, None, UnsetType] = UNSET + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check.""" dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET @@ -517,28 +459,24 @@ class SodaCheckNested(AssetNested): def _populate_soda_check_attrs(attrs: SodaCheckAttributes, obj: SodaCheck) -> None: """Populate SodaCheck-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.soda_check_id = obj.soda_check_id - attrs.soda_check_evaluation_status = obj.soda_check_evaluation_status + attrs.soda_id = obj.soda_id + attrs.soda_evaluation_status = obj.soda_evaluation_status attrs.soda_check_definition = obj.soda_check_definition - 
attrs.soda_check_last_scan_at = obj.soda_check_last_scan_at - attrs.soda_check_incident_count = obj.soda_check_incident_count - attrs.soda_check_linked_asset_qualified_name = ( - obj.soda_check_linked_asset_qualified_name - ) + attrs.soda_last_scan_at = obj.soda_last_scan_at + attrs.soda_incident_count = obj.soda_incident_count + attrs.soda_linked_asset_qualified_name = obj.soda_linked_asset_qualified_name attrs.dq_is_part_of_contract = obj.dq_is_part_of_contract def _extract_soda_check_attrs(attrs: SodaCheckAttributes) -> dict: """Extract all SodaCheck attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["soda_check_id"] = attrs.soda_check_id - result["soda_check_evaluation_status"] = attrs.soda_check_evaluation_status + result["soda_id"] = attrs.soda_id + result["soda_evaluation_status"] = attrs.soda_evaluation_status result["soda_check_definition"] = attrs.soda_check_definition - result["soda_check_last_scan_at"] = attrs.soda_check_last_scan_at - result["soda_check_incident_count"] = attrs.soda_check_incident_count - result["soda_check_linked_asset_qualified_name"] = ( - attrs.soda_check_linked_asset_qualified_name - ) + result["soda_last_scan_at"] = attrs.soda_last_scan_at + result["soda_incident_count"] = attrs.soda_incident_count + result["soda_linked_asset_qualified_name"] = attrs.soda_linked_asset_qualified_name result["dq_is_part_of_contract"] = attrs.dq_is_part_of_contract return result @@ -576,9 +514,6 @@ def _soda_check_to_nested(soda_check: SodaCheck) -> SodaCheckNested: is_incomplete=soda_check.is_incomplete, provenance_type=soda_check.provenance_type, home_id=soda_check.home_id, - depth=soda_check.depth, - immediate_upstream=soda_check.immediate_upstream, - immediate_downstream=soda_check.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -610,6 +545,7 @@ def _soda_check_from_nested(nested: SodaCheckNested) -> SodaCheck: 
updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -618,9 +554,6 @@ def _soda_check_from_nested(nested: SodaCheckNested) -> SodaCheck: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_soda_check_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -648,21 +581,17 @@ def _soda_check_from_nested_bytes(data: bytes, serde: Serde) -> SodaCheck: RelationField, ) -SodaCheck.SODA_CHECK_ID = KeywordField("sodaCheckId", "sodaCheckId") -SodaCheck.SODA_CHECK_EVALUATION_STATUS = KeywordField( - "sodaCheckEvaluationStatus", "sodaCheckEvaluationStatus" +SodaCheck.SODA_ID = KeywordField("sodaId", "sodaId") +SodaCheck.SODA_EVALUATION_STATUS = KeywordField( + "sodaEvaluationStatus", "sodaEvaluationStatus" ) SodaCheck.SODA_CHECK_DEFINITION = KeywordField( "sodaCheckDefinition", "sodaCheckDefinition" ) -SodaCheck.SODA_CHECK_LAST_SCAN_AT = NumericField( - "sodaCheckLastScanAt", "sodaCheckLastScanAt" -) -SodaCheck.SODA_CHECK_INCIDENT_COUNT = NumericField( - "sodaCheckIncidentCount", "sodaCheckIncidentCount" -) -SodaCheck.SODA_CHECK_LINKED_ASSET_QUALIFIED_NAME = KeywordField( - "sodaCheckLinkedAssetQualifiedName", "sodaCheckLinkedAssetQualifiedName" +SodaCheck.SODA_LAST_SCAN_AT = NumericField("sodaLastScanAt", "sodaLastScanAt") +SodaCheck.SODA_INCIDENT_COUNT = NumericField("sodaIncidentCount", "sodaIncidentCount") +SodaCheck.SODA_LINKED_ASSET_QUALIFIED_NAME = KeywordField( + "sodaLinkedAssetQualifiedName", "sodaLinkedAssetQualifiedName" ) SodaCheck.DQ_IS_PART_OF_CONTRACT = BooleanField( "dqIsPartOfContract", "dqIsPartOfContract" diff --git a/pyatlan_v9/model/assets/soda_related.py 
b/pyatlan_v9/model/assets/soda_related.py index 9c4431e95..43979f158 100644 --- a/pyatlan_v9/model/assets/soda_related.py +++ b/pyatlan_v9/model/assets/soda_related.py @@ -49,22 +49,22 @@ class RelatedSodaCheck(RelatedSoda): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SodaCheck" so it serializes correctly - soda_check_id: Union[str, None, UnsetType] = UNSET + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/source_tag.py b/pyatlan_v9/model/assets/source_tag.py index 6da3c1775..ad353236b 100644 --- a/pyatlan_v9/model/assets/source_tag.py +++ b/pyatlan_v9/model/assets/source_tag.py @@ -49,7 +49,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tag_related import RelatedSourceTag # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +95,8 @@ class SourceTag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SourceTag" + tag_custom_configuration: Union[str, None, 
UnsetType] = UNSET """Specifies custom configuration elements based on the system the tag is being imported from.""" @@ -206,73 +207,6 @@ class SourceTag(Asset): def __post_init__(self) -> None: self.type_name = "SourceTag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SourceTag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"SourceTag validation failed: {errors}") - - def minimize(self) -> "SourceTag": - """ - Return a minimal copy of this SourceTag with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SourceTag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SourceTag instance with only the minimum required fields. - """ - self.validate() - return SourceTag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSourceTag": - """ - Create a :class:`RelatedSourceTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSourceTag reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSourceTag(guid=self.guid) - return RelatedSourceTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -544,9 +478,6 @@ def _source_tag_to_nested(source_tag: SourceTag) -> SourceTagNested: is_incomplete=source_tag.is_incomplete, provenance_type=source_tag.provenance_type, home_id=source_tag.home_id, - depth=source_tag.depth, - immediate_upstream=source_tag.immediate_upstream, - immediate_downstream=source_tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -578,6 +509,7 @@ def _source_tag_from_nested(nested: SourceTagNested) -> SourceTag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -586,9 +518,6 @@ def _source_tag_from_nested(nested: SourceTagNested) -> SourceTag: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_source_tag_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/spark.py b/pyatlan_v9/model/assets/spark.py index dce891d29..344771b57 100644 --- a/pyatlan_v9/model/assets/spark.py +++ b/pyatlan_v9/model/assets/spark.py @@ -48,7 +48,7 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .spark_related import RelatedSpark, RelatedSparkJob +from .spark_related import RelatedSparkJob # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +96,8 @@ class Spark(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_BY_AIRFLOW_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Spark" + spark_run_version: Union[str, None, UnsetType] = UNSET """Spark Version for the Spark Job run eg. 3.4.1""" @@ -211,66 +213,6 @@ class Spark(Asset): def __post_init__(self) -> None: self.type_name = "Spark" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Spark instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Spark validation failed: {errors}") - - def minimize(self) -> "Spark": - """ - Return a minimal copy of this Spark with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Spark with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Spark instance with only the minimum required fields. - """ - self.validate() - return Spark(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSpark": - """ - Create a :class:`RelatedSpark` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSpark reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSpark(guid=self.guid) - return RelatedSpark(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -548,9 +490,6 @@ def _spark_to_nested(spark: Spark) -> SparkNested: is_incomplete=spark.is_incomplete, provenance_type=spark.provenance_type, home_id=spark.home_id, - depth=spark.depth, - immediate_upstream=spark.immediate_upstream, - immediate_downstream=spark.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -580,6 +519,7 @@ def _spark_from_nested(nested: SparkNested) -> Spark: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -588,9 +528,6 @@ def _spark_from_nested(nested: SparkNested) -> Spark: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_spark_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/spark_job.py b/pyatlan_v9/model/assets/spark_job.py index 69a65eabc..3b97e1150 100644 --- a/pyatlan_v9/model/assets/spark_job.py +++ b/pyatlan_v9/model/assets/spark_job.py @@ -103,6 +103,8 @@ class SparkJob(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_BY_AIRFLOW_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SparkJob" + spark_app_name: Union[str, None, UnsetType] = UNSET """Name of the Spark app containing this Spark Job For eg. 
extract_raw_data""" @@ -239,72 +241,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SparkJob instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.process is UNSET: - errors.append("process is required for creation") - if errors: - raise ValueError(f"SparkJob validation failed: {errors}") - - def minimize(self) -> "SparkJob": - """ - Return a minimal copy of this SparkJob with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SparkJob with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SparkJob instance with only the minimum required fields. 
- """ - self.validate() - return SparkJob(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSparkJob": - """ - Create a :class:`RelatedSparkJob` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSparkJob reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSparkJob(guid=self.guid) - return RelatedSparkJob(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -604,9 +540,6 @@ def _spark_job_to_nested(spark_job: SparkJob) -> SparkJobNested: is_incomplete=spark_job.is_incomplete, provenance_type=spark_job.provenance_type, home_id=spark_job.home_id, - depth=spark_job.depth, - immediate_upstream=spark_job.immediate_upstream, - immediate_downstream=spark_job.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -638,6 +571,7 @@ def _spark_job_from_nested(nested: SparkJobNested) -> SparkJob: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -646,9 +580,6 @@ def _spark_job_from_nested(nested: SparkJobNested) -> SparkJob: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_spark_job_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sql.py 
b/pyatlan_v9/model/assets/sql.py index 4fce3e6d2..a3771328d 100644 --- a/pyatlan_v9/model/assets/sql.py +++ b/pyatlan_v9/model/assets/sql.py @@ -57,7 +57,6 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedSQL # ============================================================================= # FLAT ASSET CLASS @@ -124,6 +123,8 @@ class SQL(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SQL" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -300,66 +301,6 @@ class SQL(Asset): def __post_init__(self) -> None: self.type_name = "SQL" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SQL instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SQL validation failed: {errors}") - - def minimize(self) -> "SQL": - """ - Return a minimal copy of this SQL with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SQL with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SQL instance with only the minimum required fields. - """ - self.validate() - return SQL(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSQL": - """ - Create a :class:`RelatedSQL` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSQL reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSQL(guid=self.guid) - return RelatedSQL(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -728,9 +669,6 @@ def _sql_to_nested(sql: SQL) -> SQLNested: is_incomplete=sql.is_incomplete, provenance_type=sql.provenance_type, home_id=sql.home_id, - depth=sql.depth, - immediate_upstream=sql.immediate_upstream, - immediate_downstream=sql.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -760,6 +698,7 @@ def _sql_from_nested(nested: SQLNested) -> SQL: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -768,9 +707,6 @@ def _sql_from_nested(nested: SQLNested) -> SQL: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sql_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/sql_related.py b/pyatlan_v9/model/assets/sql_related.py index 0e7148484..99084ed53 100644 --- a/pyatlan_v9/model/assets/sql_related.py +++ b/pyatlan_v9/model/assets/sql_related.py @@ -119,16 +119,16 @@ class RelatedCalculationView(RelatedSQL): column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, 
None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" def __post_init__(self) -> None: @@ -152,10 +152,10 @@ class RelatedColumn(RelatedSQL): sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -230,112 +230,112 @@ class RelatedColumn(RelatedSQL): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric 
column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_maximum_string_length: Union[int, None, 
UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: Union[float, None, UnsetType] = UNSET + sql_min_value: Union[float, None, UnsetType] = UNSET """Least value in 
a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] = UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -344,10 +344,10 @@ class RelatedColumn(RelatedSQL): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not 
exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" def __post_init__(self) -> None: @@ -386,45 +386,45 @@ class RelatedFunction(RelatedSQL): function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" + sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET + sql_is_secure: Union[bool, None, UnsetType] = UNSET """Whether sensitive information of the function is omitted for unauthorized users (true) or not (false).""" - function_is_memoizable: Union[bool, 
None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" def __post_init__(self) -> None: @@ -598,7 +598,7 @@ class RelatedSchema(RelatedSQL): table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -631,7 +631,7 @@ class RelatedTable(RelatedSQL): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -673,7 +673,7 @@ class RelatedTable(RelatedSQL): is_sharded: Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not 
(false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ -688,19 +688,19 @@ class RelatedTable(RelatedSQL): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/starburst.py b/pyatlan_v9/model/assets/starburst.py index 03482b5cf..fa7ca6b84 100644 --- a/pyatlan_v9/model/assets/starburst.py +++ b/pyatlan_v9/model/assets/starburst.py @@ -57,7 +57,6 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .starburst_related import RelatedStarburst # ============================================================================= # FLAT ASSET CLASS @@ -127,6 +126,8 @@ class Starburst(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Starburst" + starburst_data_product_name: Union[str, None, UnsetType] = UNSET """Name of the Starburst Data Product 
that contains this asset.""" @@ -312,66 +313,6 @@ class Starburst(Asset): def __post_init__(self) -> None: self.type_name = "Starburst" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Starburst instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Starburst validation failed: {errors}") - - def minimize(self) -> "Starburst": - """ - Return a minimal copy of this Starburst with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Starburst with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Starburst instance with only the minimum required fields. - """ - self.validate() - return Starburst(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedStarburst": - """ - Create a :class:`RelatedStarburst` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedStarburst reference to this asset. - """ - if self.guid is not UNSET: - return RelatedStarburst(guid=self.guid) - return RelatedStarburst(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -759,9 +700,6 @@ def _starburst_to_nested(starburst: Starburst) -> StarburstNested: is_incomplete=starburst.is_incomplete, provenance_type=starburst.provenance_type, home_id=starburst.home_id, - depth=starburst.depth, - immediate_upstream=starburst.immediate_upstream, - immediate_downstream=starburst.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -793,6 +731,7 @@ def _starburst_from_nested(nested: StarburstNested) -> Starburst: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -801,9 +740,6 @@ def _starburst_from_nested(nested: StarburstNested) -> Starburst: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/starburst_dataset.py b/pyatlan_v9/model/assets/starburst_dataset.py index 8710b7a03..b42d14ab8 100644 --- a/pyatlan_v9/model/assets/starburst_dataset.py +++ b/pyatlan_v9/model/assets/starburst_dataset.py @@ -65,7 +65,7 @@ RelatedTable, RelatedTablePartition, ) -from .starburst_related import 
RelatedStarburstDataset, RelatedStarburstDatasetColumn +from .starburst_related import RelatedStarburstDatasetColumn # ============================================================================= # FLAT ASSET CLASS @@ -173,6 +173,8 @@ class StarburstDataset(Asset): STARBURST_DATA_PRODUCT: ClassVar[Any] = None STARBURST_DATASET_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "StarburstDataset" + starburst_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this dataset is a materialized view.""" @@ -480,70 +482,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this StarburstDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"StarburstDataset validation failed: {errors}") - - def minimize(self) -> "StarburstDataset": - """ - Return a minimal copy of this StarburstDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new StarburstDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new StarburstDataset instance with only the minimum required fields. - """ - self.validate() - return StarburstDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedStarburstDataset": - """ - Create a :class:`RelatedStarburstDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedStarburstDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedStarburstDataset(guid=self.guid) - return RelatedStarburstDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1125,9 +1063,6 @@ def _starburst_dataset_to_nested( is_incomplete=starburst_dataset.is_incomplete, provenance_type=starburst_dataset.provenance_type, home_id=starburst_dataset.home_id, - depth=starburst_dataset.depth, - immediate_upstream=starburst_dataset.immediate_upstream, - immediate_downstream=starburst_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1161,6 +1096,7 @@ def _starburst_dataset_from_nested(nested: StarburstDatasetNested) -> StarburstD updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1169,9 +1105,6 @@ def _starburst_dataset_from_nested(nested: StarburstDatasetNested) -> StarburstD is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/starburst_dataset_column.py b/pyatlan_v9/model/assets/starburst_dataset_column.py index b0185f423..374719b84 100644 --- a/pyatlan_v9/model/assets/starburst_dataset_column.py +++ b/pyatlan_v9/model/assets/starburst_dataset_column.py @@ -74,7 +74,7 @@ RelatedTablePartition, RelatedView, ) -from .starburst_related import RelatedStarburstDataset, 
RelatedStarburstDatasetColumn +from .starburst_related import RelatedStarburstDataset # ============================================================================= # FLAT ASSET CLASS @@ -234,6 +234,8 @@ class StarburstDatasetColumn(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None STARBURST_DATASET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "StarburstDatasetColumn" + starburst_sql_column_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the corresponding SQL Column. Enables cross-stream lookup between the Data Product perspective and the SQL perspective of the same underlying column.""" @@ -707,82 +709,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this StarburstDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.starburst_dataset is UNSET: - errors.append("starburst_dataset is required for creation") - if self.starburst_dataset_name is UNSET: - errors.append("starburst_dataset_name is required for creation") - if self.starburst_dataset_qualified_name is UNSET: - errors.append( - "starburst_dataset_qualified_name is required for creation" - ) - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"StarburstDatasetColumn validation failed: {errors}") - - def minimize(self) -> "StarburstDatasetColumn": - """ - Return a minimal copy of this StarburstDatasetColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new StarburstDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new StarburstDatasetColumn instance with only the minimum required fields. - """ - self.validate() - return StarburstDatasetColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedStarburstDatasetColumn": - """ - Create a :class:`RelatedStarburstDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedStarburstDatasetColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedStarburstDatasetColumn(guid=self.guid) - return RelatedStarburstDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1627,9 +1553,6 @@ def _starburst_dataset_column_to_nested( is_incomplete=starburst_dataset_column.is_incomplete, provenance_type=starburst_dataset_column.provenance_type, home_id=starburst_dataset_column.home_id, - depth=starburst_dataset_column.depth, - immediate_upstream=starburst_dataset_column.immediate_upstream, - immediate_downstream=starburst_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1665,6 +1588,7 @@ def _starburst_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1673,9 +1597,6 @@ def _starburst_dataset_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/table.py b/pyatlan_v9/model/assets/table.py index 35f6d5509..249633e5f 100644 --- a/pyatlan_v9/model/assets/table.py +++ b/pyatlan_v9/model/assets/table.py @@ -81,7 +81,7 @@ class Table(Asset): COLUMN_COUNT: ClassVar[Any] = None ROW_COUNT: ClassVar[Any] = None SIZE_BYTES: ClassVar[Any] = None - 
TABLE_OBJECT_COUNT: ClassVar[Any] = None + SQL_OBJECT_COUNT: ClassVar[Any] = None ALIAS: ClassVar[Any] = None IS_TEMPORARY: ClassVar[Any] = None IS_QUERY_PREVIEW: ClassVar[Any] = None @@ -95,16 +95,16 @@ class Table(Asset): TABLE_DEFINITION: ClassVar[Any] = None PARTITION_LIST: ClassVar[Any] = None IS_SHARDED: ClassVar[Any] = None - TABLE_TYPE: ClassVar[Any] = None + SQL_TYPE: ClassVar[Any] = None ICEBERG_CATALOG_NAME: ClassVar[Any] = None ICEBERG_TABLE_TYPE: ClassVar[Any] = None ICEBERG_CATALOG_SOURCE: ClassVar[Any] = None ICEBERG_CATALOG_TABLE_NAME: ClassVar[Any] = None - TABLE_IMPALA_PARAMETERS: ClassVar[Any] = None + SQL_IMPALA_PARAMETERS: ClassVar[Any] = None ICEBERG_CATALOG_TABLE_NAMESPACE: ClassVar[Any] = None - TABLE_EXTERNAL_VOLUME_NAME: ClassVar[Any] = None + SQL_EXTERNAL_VOLUME_NAME: ClassVar[Any] = None ICEBERG_TABLE_BASE_LOCATION: ClassVar[Any] = None - TABLE_RETENTION_TIME: ClassVar[Any] = None + SQL_RETENTION_TIME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -165,6 +165,8 @@ class Table(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Table" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this table.""" @@ -174,7 +176,7 @@ class Table(Asset): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -216,7 +218,7 @@ class Table(Asset): is_sharded: Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not (false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ 
-231,19 +233,19 @@ class Table(Asset): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" query_count: Union[int, None, UnsetType] = UNSET @@ -448,80 +450,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Table instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Table validation failed: {errors}") - - def minimize(self) -> "Table": - """ - Return a minimal copy of this Table with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Table with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Table instance with only the minimum required fields. - """ - self.validate() - return Table(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTable": - """ - Create a :class:`RelatedTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTable(guid=self.guid) - return RelatedTable(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -661,7 +589,7 @@ class TableAttributes(AssetAttributes): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -703,7 +631,7 @@ class TableAttributes(AssetAttributes): is_sharded: Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not (false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ -718,19 +646,19 @@ class TableAttributes(AssetAttributes): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" query_count: Union[int, None, UnsetType] = UNSET @@ -998,7 +926,7 @@ def _populate_table_attrs(attrs: TableAttributes, obj: Table) -> None: attrs.column_count = 
obj.column_count attrs.row_count = obj.row_count attrs.size_bytes = obj.size_bytes - attrs.table_object_count = obj.table_object_count + attrs.sql_object_count = obj.sql_object_count attrs.alias = obj.alias attrs.is_temporary = obj.is_temporary attrs.is_query_preview = obj.is_query_preview @@ -1012,16 +940,16 @@ def _populate_table_attrs(attrs: TableAttributes, obj: Table) -> None: attrs.table_definition = obj.table_definition attrs.partition_list = obj.partition_list attrs.is_sharded = obj.is_sharded - attrs.table_type = obj.table_type + attrs.sql_type = obj.sql_type attrs.iceberg_catalog_name = obj.iceberg_catalog_name attrs.iceberg_table_type = obj.iceberg_table_type attrs.iceberg_catalog_source = obj.iceberg_catalog_source attrs.iceberg_catalog_table_name = obj.iceberg_catalog_table_name - attrs.table_impala_parameters = obj.table_impala_parameters + attrs.sql_impala_parameters = obj.sql_impala_parameters attrs.iceberg_catalog_table_namespace = obj.iceberg_catalog_table_namespace - attrs.table_external_volume_name = obj.table_external_volume_name + attrs.sql_external_volume_name = obj.sql_external_volume_name attrs.iceberg_table_base_location = obj.iceberg_table_base_location - attrs.table_retention_time = obj.table_retention_time + attrs.sql_retention_time = obj.sql_retention_time attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -1048,7 +976,7 @@ def _extract_table_attrs(attrs: TableAttributes) -> dict: result["column_count"] = attrs.column_count result["row_count"] = attrs.row_count result["size_bytes"] = attrs.size_bytes - result["table_object_count"] = attrs.table_object_count + result["sql_object_count"] = attrs.sql_object_count result["alias"] = attrs.alias result["is_temporary"] = attrs.is_temporary result["is_query_preview"] = attrs.is_query_preview @@ -1062,16 +990,16 @@ def _extract_table_attrs(attrs: TableAttributes) -> dict: result["table_definition"] = 
attrs.table_definition result["partition_list"] = attrs.partition_list result["is_sharded"] = attrs.is_sharded - result["table_type"] = attrs.table_type + result["sql_type"] = attrs.sql_type result["iceberg_catalog_name"] = attrs.iceberg_catalog_name result["iceberg_table_type"] = attrs.iceberg_table_type result["iceberg_catalog_source"] = attrs.iceberg_catalog_source result["iceberg_catalog_table_name"] = attrs.iceberg_catalog_table_name - result["table_impala_parameters"] = attrs.table_impala_parameters + result["sql_impala_parameters"] = attrs.sql_impala_parameters result["iceberg_catalog_table_namespace"] = attrs.iceberg_catalog_table_namespace - result["table_external_volume_name"] = attrs.table_external_volume_name + result["sql_external_volume_name"] = attrs.sql_external_volume_name result["iceberg_table_base_location"] = attrs.iceberg_table_base_location - result["table_retention_time"] = attrs.table_retention_time + result["sql_retention_time"] = attrs.sql_retention_time result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1128,9 +1056,6 @@ def _table_to_nested(table: Table) -> TableNested: is_incomplete=table.is_incomplete, provenance_type=table.provenance_type, home_id=table.home_id, - depth=table.depth, - immediate_upstream=table.immediate_upstream, - immediate_downstream=table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1160,6 +1085,7 @@ def _table_from_nested(nested: TableNested) -> Table: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1168,9 +1094,6 @@ def _table_from_nested(nested: TableNested) -> Table: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1201,7 +1124,7 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.COLUMN_COUNT = NumericField("columnCount", "columnCount") Table.ROW_COUNT = NumericField("rowCount", "rowCount") Table.SIZE_BYTES = NumericField("sizeBytes", "sizeBytes") -Table.TABLE_OBJECT_COUNT = NumericField("tableObjectCount", "tableObjectCount") +Table.SQL_OBJECT_COUNT = NumericField("sqlObjectCount", "sqlObjectCount") Table.ALIAS = KeywordField("alias", "alias") Table.IS_TEMPORARY = BooleanField("isTemporary", "isTemporary") Table.IS_QUERY_PREVIEW = BooleanField("isQueryPreview", "isQueryPreview") @@ -1219,7 +1142,7 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.TABLE_DEFINITION = KeywordField("tableDefinition", "tableDefinition") Table.PARTITION_LIST = KeywordField("partitionList", "partitionList") Table.IS_SHARDED = BooleanField("isSharded", "isSharded") -Table.TABLE_TYPE = KeywordField("tableType", "tableType") +Table.SQL_TYPE = KeywordField("sqlType", "sqlType") Table.ICEBERG_CATALOG_NAME = KeywordField("icebergCatalogName", "icebergCatalogName") Table.ICEBERG_TABLE_TYPE = KeywordField("icebergTableType", "icebergTableType") Table.ICEBERG_CATALOG_SOURCE = KeywordField( @@ -1228,19 +1151,17 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.ICEBERG_CATALOG_TABLE_NAME = KeywordField( "icebergCatalogTableName", "icebergCatalogTableName" ) -Table.TABLE_IMPALA_PARAMETERS = KeywordField( - "tableImpalaParameters", "tableImpalaParameters" -) +Table.SQL_IMPALA_PARAMETERS = KeywordField("sqlImpalaParameters", "sqlImpalaParameters") Table.ICEBERG_CATALOG_TABLE_NAMESPACE = KeywordField( "icebergCatalogTableNamespace", "icebergCatalogTableNamespace" ) 
-Table.TABLE_EXTERNAL_VOLUME_NAME = KeywordField( - "tableExternalVolumeName", "tableExternalVolumeName" +Table.SQL_EXTERNAL_VOLUME_NAME = KeywordField( + "sqlExternalVolumeName", "sqlExternalVolumeName" ) Table.ICEBERG_TABLE_BASE_LOCATION = KeywordField( "icebergTableBaseLocation", "icebergTableBaseLocation" ) -Table.TABLE_RETENTION_TIME = NumericField("tableRetentionTime", "tableRetentionTime") +Table.SQL_RETENTION_TIME = NumericField("sqlRetentionTime", "sqlRetentionTime") Table.QUERY_COUNT = NumericField("queryCount", "queryCount") Table.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") Table.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") diff --git a/pyatlan_v9/model/assets/table_partition.py b/pyatlan_v9/model/assets/table_partition.py index 731180253..295a726f6 100644 --- a/pyatlan_v9/model/assets/table_partition.py +++ b/pyatlan_v9/model/assets/table_partition.py @@ -145,6 +145,8 @@ class TablePartition(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TablePartition" + constraint: Union[str, None, UnsetType] = UNSET """Constraint that defines this table partition.""" @@ -386,84 +388,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TablePartition instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent_table is UNSET: - errors.append("parent_table is required for creation") - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"TablePartition validation failed: {errors}") - - def minimize(self) -> "TablePartition": - """ - Return a minimal copy of this TablePartition with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TablePartition with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TablePartition instance with only the minimum required fields. 
- """ - self.validate() - return TablePartition(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTablePartition": - """ - Create a :class:`RelatedTablePartition` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTablePartition reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTablePartition(guid=self.guid) - return RelatedTablePartition(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -1039,9 +963,6 @@ def _table_partition_to_nested(table_partition: TablePartition) -> TablePartitio is_incomplete=table_partition.is_incomplete, provenance_type=table_partition.provenance_type, home_id=table_partition.home_id, - depth=table_partition.depth, - immediate_upstream=table_partition.immediate_upstream, - immediate_downstream=table_partition.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1075,6 +996,7 @@ def _table_partition_from_nested(nested: TablePartitionNested) -> TablePartition updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1083,9 +1005,6 @@ def _table_partition_from_nested(nested: TablePartitionNested) -> TablePartition is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_table_partition_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau.py b/pyatlan_v9/model/assets/tableau.py index 
45e0c40a9..b20397cb2 100644 --- a/pyatlan_v9/model/assets/tableau.py +++ b/pyatlan_v9/model/assets/tableau.py @@ -49,7 +49,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableau # ============================================================================= # FLAT ASSET CLASS @@ -92,6 +91,8 @@ class Tableau(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Tableau" + tableau_project_hierarchy_qualified_names: Union[List[str], None, UnsetType] = UNSET """Array of qualified names representing the project hierarchy for this Tableau asset.""" @@ -190,66 +191,6 @@ class Tableau(Asset): def __post_init__(self) -> None: self.type_name = "Tableau" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Tableau instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Tableau validation failed: {errors}") - - def minimize(self) -> "Tableau": - """ - Return a minimal copy of this Tableau with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Tableau with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Tableau instance with only the minimum required fields. - """ - self.validate() - return Tableau(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableau": - """ - Create a :class:`RelatedTableau` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableau reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableau(guid=self.guid) - return RelatedTableau(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -505,9 +446,6 @@ def _tableau_to_nested(tableau: Tableau) -> TableauNested: is_incomplete=tableau.is_incomplete, provenance_type=tableau.provenance_type, home_id=tableau.home_id, - depth=tableau.depth, - immediate_upstream=tableau.immediate_upstream, - immediate_downstream=tableau.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -537,6 +475,7 @@ def _tableau_from_nested(nested: TableauNested) -> Tableau: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -545,9 +484,6 @@ def _tableau_from_nested(nested: TableauNested) -> Tableau: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_calculated_field.py b/pyatlan_v9/model/assets/tableau_calculated_field.py index 196a33596..b0ec0ea3e 100644 --- a/pyatlan_v9/model/assets/tableau_calculated_field.py +++ b/pyatlan_v9/model/assets/tableau_calculated_field.py @@ -51,7 +51,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .tableau_related import ( - RelatedTableauCalculatedField, RelatedTableauDatasource, RelatedTableauWorksheet, 
RelatedTableauWorksheetField, @@ -112,6 +111,8 @@ class TableauCalculatedField(Asset): DATASOURCE: ClassVar[Any] = None WORKSHEETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauCalculatedField" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this calculated field exists.""" @@ -262,80 +263,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauCalculatedField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasource is UNSET: - errors.append("datasource is required for creation") - if self.datasource_qualified_name is UNSET: - errors.append("datasource_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauCalculatedField validation failed: {errors}") - - def minimize(self) -> "TableauCalculatedField": - """ - Return a minimal copy of this TableauCalculatedField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauCalculatedField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauCalculatedField instance with only the minimum required fields. - """ - self.validate() - return TableauCalculatedField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedTableauCalculatedField": - """ - Create a :class:`RelatedTableauCalculatedField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedTableauCalculatedField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauCalculatedField(guid=self.guid) - return RelatedTableauCalculatedField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -672,9 +599,6 @@ def _tableau_calculated_field_to_nested( is_incomplete=tableau_calculated_field.is_incomplete, provenance_type=tableau_calculated_field.provenance_type, home_id=tableau_calculated_field.home_id, - depth=tableau_calculated_field.depth, - immediate_upstream=tableau_calculated_field.immediate_upstream, - immediate_downstream=tableau_calculated_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -710,6 +634,7 @@ def _tableau_calculated_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -718,9 +643,6 @@ def _tableau_calculated_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_calculated_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_dashboard.py b/pyatlan_v9/model/assets/tableau_dashboard.py index 101d92443..9ecf9a082 100644 --- a/pyatlan_v9/model/assets/tableau_dashboard.py +++ b/pyatlan_v9/model/assets/tableau_dashboard.py @@ -108,6 +108,8 @@ class TableauDashboard(Asset): TABLEAU_PARENT_DASHBOARDS: ClassVar[Any] = None 
TABLEAU_DASHBOARD_FIELDS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDashboard" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this dashboard exists.""" @@ -250,78 +252,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workbook is UNSET: - errors.append("workbook is required for creation") - if self.workbook_qualified_name is UNSET: - errors.append("workbook_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: 
- raise ValueError(f"TableauDashboard validation failed: {errors}") - - def minimize(self) -> "TableauDashboard": - """ - Return a minimal copy of this TableauDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDashboard instance with only the minimum required fields. - """ - self.validate() - return TableauDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDashboard": - """ - Create a :class:`RelatedTableauDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauDashboard(guid=self.guid) - return RelatedTableauDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -638,9 +568,6 @@ def _tableau_dashboard_to_nested( is_incomplete=tableau_dashboard.is_incomplete, provenance_type=tableau_dashboard.provenance_type, home_id=tableau_dashboard.home_id, - depth=tableau_dashboard.depth, - immediate_upstream=tableau_dashboard.immediate_upstream, - immediate_downstream=tableau_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +601,7 @@ def _tableau_dashboard_from_nested(nested: TableauDashboardNested) -> TableauDas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +610,6 @@ def _tableau_dashboard_from_nested(nested: TableauDashboardNested) -> TableauDas is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_dashboard_field.py b/pyatlan_v9/model/assets/tableau_dashboard_field.py index 5aed4e3a0..061f26da8 100644 --- a/pyatlan_v9/model/assets/tableau_dashboard_field.py +++ b/pyatlan_v9/model/assets/tableau_dashboard_field.py @@ -50,11 +50,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import ( - RelatedTableauDashboard, - RelatedTableauDashboardField, - RelatedTableauWorksheetField, -) +from .tableau_related import RelatedTableauDashboard, RelatedTableauWorksheetField # ============================================================================= # FLAT ASSET CLASS @@ -114,6 +110,8 @@ class TableauDashboardField(Asset): TABLEAU_WORKSHEET_FIELD: ClassVar[Any] = None TABLEAU_DASHBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDashboardField" + tableau_site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this dashboard field exists.""" @@ -277,80 +275,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDashboardField instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.tableau_dashboard is UNSET: - errors.append("tableau_dashboard is required for creation") - if self.tableau_dashboard_qualified_name is UNSET: - errors.append( - "tableau_dashboard_qualified_name is required for creation" - ) - if self.tableau_project_qualified_name is UNSET: - errors.append("tableau_project_qualified_name is required for creation") - if self.tableau_site_qualified_name is UNSET: - errors.append("tableau_site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDashboardField validation failed: {errors}") - - def minimize(self) -> "TableauDashboardField": - """ - Return a minimal copy of this TableauDashboardField with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDashboardField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDashboardField instance with only the minimum required fields. - """ - self.validate() - return TableauDashboardField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDashboardField": - """ - Create a :class:`RelatedTableauDashboardField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDashboardField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauDashboardField(guid=self.guid) - return RelatedTableauDashboardField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -725,9 +649,6 @@ def _tableau_dashboard_field_to_nested( is_incomplete=tableau_dashboard_field.is_incomplete, provenance_type=tableau_dashboard_field.provenance_type, home_id=tableau_dashboard_field.home_id, - depth=tableau_dashboard_field.depth, - immediate_upstream=tableau_dashboard_field.immediate_upstream, - immediate_downstream=tableau_dashboard_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -763,6 +684,7 @@ def _tableau_dashboard_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -771,9 +693,6 @@ def 
_tableau_dashboard_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_dashboard_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_datasource.py b/pyatlan_v9/model/assets/tableau_datasource.py index 8d7be003e..d7c6e031e 100644 --- a/pyatlan_v9/model/assets/tableau_datasource.py +++ b/pyatlan_v9/model/assets/tableau_datasource.py @@ -51,7 +51,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .tableau_related import ( - RelatedTableauDatasource, RelatedTableauDatasourceField, RelatedTableauProject, RelatedTableauWorkbook, @@ -114,6 +113,8 @@ class TableauDatasource(Asset): WORKBOOK: ClassVar[Any] = None FIELDS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDatasource" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this datasource exists.""" @@ -268,76 +269,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDatasource validation failed: {errors}") - - def minimize(self) -> "TableauDatasource": - """ - Return a minimal copy of this TableauDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDatasource instance with only the minimum required fields. - """ - self.validate() - return TableauDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDatasource": - """ - Create a :class:`RelatedTableauDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauDatasource(guid=self.guid) - return RelatedTableauDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -680,9 +611,6 @@ def _tableau_datasource_to_nested( is_incomplete=tableau_datasource.is_incomplete, provenance_type=tableau_datasource.provenance_type, home_id=tableau_datasource.home_id, - depth=tableau_datasource.depth, - immediate_upstream=tableau_datasource.immediate_upstream, - immediate_downstream=tableau_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -718,6 +646,7 @@ def _tableau_datasource_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -726,9 +655,6 @@ def _tableau_datasource_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_datasource_field.py b/pyatlan_v9/model/assets/tableau_datasource_field.py index 30e57321e..b56589695 100644 --- a/pyatlan_v9/model/assets/tableau_datasource_field.py +++ b/pyatlan_v9/model/assets/tableau_datasource_field.py @@ -52,7 +52,6 @@ from .spark_related import RelatedSparkJob from .tableau_related import ( RelatedTableauDatasource, - RelatedTableauDatasourceField, RelatedTableauWorksheet, RelatedTableauWorksheetField, ) @@ -117,6 
+116,8 @@ class TableauDatasourceField(Asset): WORKSHEETS: ClassVar[Any] = None TABLEAU_WORKSHEET_FIELD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDatasourceField" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this datasource field exists.""" @@ -282,80 +283,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDatasourceField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasource is UNSET: - errors.append("datasource is required for creation") - if self.datasource_qualified_name is UNSET: - errors.append("datasource_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDatasourceField validation failed: {errors}") - - def minimize(self) -> "TableauDatasourceField": - """ - Return a minimal copy of this TableauDatasourceField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDatasourceField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDatasourceField instance with only the minimum required fields. - """ - self.validate() - return TableauDatasourceField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedTableauDatasourceField": - """ - Create a :class:`RelatedTableauDatasourceField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedTableauDatasourceField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauDatasourceField(guid=self.guid) - return RelatedTableauDatasourceField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -725,9 +652,6 @@ def _tableau_datasource_field_to_nested( is_incomplete=tableau_datasource_field.is_incomplete, provenance_type=tableau_datasource_field.provenance_type, home_id=tableau_datasource_field.home_id, - depth=tableau_datasource_field.depth, - immediate_upstream=tableau_datasource_field.immediate_upstream, - immediate_downstream=tableau_datasource_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -763,6 +687,7 @@ def _tableau_datasource_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -771,9 +696,6 @@ def _tableau_datasource_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_datasource_field_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_flow.py b/pyatlan_v9/model/assets/tableau_flow.py index d5a67b38f..ffc65f02d 100644 --- a/pyatlan_v9/model/assets/tableau_flow.py +++ b/pyatlan_v9/model/assets/tableau_flow.py @@ -50,7 +50,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck 
from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableauFlow, RelatedTableauProject +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -101,6 +101,8 @@ class TableauFlow(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauFlow" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this flow exists.""" @@ -231,76 +233,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauFlow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauFlow validation failed: {errors}") - - def minimize(self) -> "TableauFlow": - """ - Return a minimal copy of this TableauFlow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauFlow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauFlow instance with only the minimum required fields. - """ - self.validate() - return TableauFlow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauFlow": - """ - Create a :class:`RelatedTableauFlow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauFlow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauFlow(guid=self.guid) - return RelatedTableauFlow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -597,9 +529,6 @@ def _tableau_flow_to_nested(tableau_flow: TableauFlow) -> TableauFlowNested: is_incomplete=tableau_flow.is_incomplete, provenance_type=tableau_flow.provenance_type, home_id=tableau_flow.home_id, - depth=tableau_flow.depth, - immediate_upstream=tableau_flow.immediate_upstream, - immediate_downstream=tableau_flow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -631,6 +560,7 @@ def _tableau_flow_from_nested(nested: TableauFlowNested) -> TableauFlow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -639,9 +569,6 @@ def _tableau_flow_from_nested(nested: TableauFlowNested) -> TableauFlow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_flow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_metric.py b/pyatlan_v9/model/assets/tableau_metric.py index 9d4ef1288..746f243e6 100644 --- a/pyatlan_v9/model/assets/tableau_metric.py +++ b/pyatlan_v9/model/assets/tableau_metric.py @@ -50,7 +50,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import 
RelatedTableauMetric, RelatedTableauProject +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -98,6 +98,8 @@ class TableauMetric(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauMetric" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this metric exists.""" @@ -219,76 +221,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauMetric validation failed: {errors}") - - def minimize(self) -> "TableauMetric": - """ - Return a minimal copy of this TableauMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauMetric instance with only the minimum required fields. - """ - self.validate() - return TableauMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauMetric": - """ - Create a :class:`RelatedTableauMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauMetric reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauMetric(guid=self.guid) - return RelatedTableauMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -572,9 +504,6 @@ def _tableau_metric_to_nested(tableau_metric: TableauMetric) -> TableauMetricNes is_incomplete=tableau_metric.is_incomplete, provenance_type=tableau_metric.provenance_type, home_id=tableau_metric.home_id, - depth=tableau_metric.depth, - immediate_upstream=tableau_metric.immediate_upstream, - immediate_downstream=tableau_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +537,7 @@ def _tableau_metric_from_nested(nested: TableauMetricNested) -> TableauMetric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +546,6 @@ def _tableau_metric_from_nested(nested: TableauMetricNested) -> TableauMetric: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_metric_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_project.py b/pyatlan_v9/model/assets/tableau_project.py index 8d0097269..37b0dfbbc 100644 --- a/pyatlan_v9/model/assets/tableau_project.py +++ b/pyatlan_v9/model/assets/tableau_project.py @@ -109,6 +109,8 @@ class TableauProject(Asset): FLOWS: ClassVar[Any] = None WORKBOOKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauProject" 
+ site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this project exists.""" @@ -243,74 +245,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.site is UNSET: - errors.append("site is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauProject validation failed: {errors}") - - def minimize(self) -> "TableauProject": - """ - Return a minimal copy of this TableauProject with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauProject instance with only the minimum required fields. - """ - self.validate() - return TableauProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauProject": - """ - Create a :class:`RelatedTableauProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauProject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauProject(guid=self.guid) - return RelatedTableauProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -616,9 +550,6 @@ def _tableau_project_to_nested(tableau_project: TableauProject) -> TableauProjec is_incomplete=tableau_project.is_incomplete, provenance_type=tableau_project.provenance_type, home_id=tableau_project.home_id, - depth=tableau_project.depth, - immediate_upstream=tableau_project.immediate_upstream, - immediate_downstream=tableau_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -652,6 +583,7 @@ def _tableau_project_from_nested(nested: TableauProjectNested) -> TableauProject updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -660,9 +592,6 @@ def 
_tableau_project_from_nested(nested: TableauProjectNested) -> TableauProject is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_project_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_site.py b/pyatlan_v9/model/assets/tableau_site.py index 08ce99c0f..418093d00 100644 --- a/pyatlan_v9/model/assets/tableau_site.py +++ b/pyatlan_v9/model/assets/tableau_site.py @@ -49,7 +49,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableauProject, RelatedTableauSite +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -93,6 +93,8 @@ class TableauSite(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECTS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauSite" + tableau_project_hierarchy_qualified_names: Union[List[str], None, UnsetType] = UNSET """Array of qualified names representing the project hierarchy for this Tableau asset.""" @@ -194,66 +196,6 @@ class TableauSite(Asset): def __post_init__(self) -> None: self.type_name = "TableauSite" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauSite instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"TableauSite validation failed: {errors}") - - def minimize(self) -> "TableauSite": - """ - Return a minimal copy of this TableauSite with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauSite with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauSite instance with only the minimum required fields. - """ - self.validate() - return TableauSite(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauSite": - """ - Create a :class:`RelatedTableauSite` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauSite reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauSite(guid=self.guid) - return RelatedTableauSite(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -515,9 +457,6 @@ def _tableau_site_to_nested(tableau_site: TableauSite) -> TableauSiteNested: is_incomplete=tableau_site.is_incomplete, provenance_type=tableau_site.provenance_type, home_id=tableau_site.home_id, - depth=tableau_site.depth, - immediate_upstream=tableau_site.immediate_upstream, - immediate_downstream=tableau_site.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -549,6 +488,7 @@ def _tableau_site_from_nested(nested: TableauSiteNested) -> TableauSite: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -557,9 +497,6 @@ def _tableau_site_from_nested(nested: TableauSiteNested) -> TableauSite: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_site_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_workbook.py b/pyatlan_v9/model/assets/tableau_workbook.py index 676136eb9..28c9d6fd1 100644 --- a/pyatlan_v9/model/assets/tableau_workbook.py +++ b/pyatlan_v9/model/assets/tableau_workbook.py @@ -54,7 +54,6 @@ RelatedTableauDashboard, RelatedTableauDatasource, RelatedTableauProject, - RelatedTableauWorkbook, RelatedTableauWorksheet, ) @@ -108,6 +107,8 @@ class 
TableauWorkbook(Asset): WORKSHEETS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorkbook" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this workbook exists.""" @@ -241,76 +242,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorkbook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorkbook validation failed: {errors}") - - def 
minimize(self) -> "TableauWorkbook": - """ - Return a minimal copy of this TableauWorkbook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorkbook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorkbook instance with only the minimum required fields. - """ - self.validate() - return TableauWorkbook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorkbook": - """ - Create a :class:`RelatedTableauWorkbook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorkbook reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauWorkbook(guid=self.guid) - return RelatedTableauWorkbook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -617,9 +548,6 @@ def _tableau_workbook_to_nested( is_incomplete=tableau_workbook.is_incomplete, provenance_type=tableau_workbook.provenance_type, home_id=tableau_workbook.home_id, - depth=tableau_workbook.depth, - immediate_upstream=tableau_workbook.immediate_upstream, - immediate_downstream=tableau_workbook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -653,6 +581,7 @@ def _tableau_workbook_from_nested(nested: TableauWorkbookNested) -> TableauWorkb updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -661,9 +590,6 @@ def _tableau_workbook_from_nested(nested: TableauWorkbookNested) -> TableauWorkb is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_workbook_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_worksheet.py b/pyatlan_v9/model/assets/tableau_worksheet.py index e9a0b3037..51a727061 100644 --- a/pyatlan_v9/model/assets/tableau_worksheet.py +++ b/pyatlan_v9/model/assets/tableau_worksheet.py @@ -55,7 +55,6 @@ RelatedTableauDashboard, RelatedTableauDatasourceField, RelatedTableauWorkbook, - RelatedTableauWorksheet, RelatedTableauWorksheetField, ) @@ -110,6 +109,8 @@ class TableauWorksheet(Asset): CALCULATED_FIELDS: ClassVar[Any] = None WORKBOOK: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorksheet" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this worksheet exists.""" @@ -252,78 +253,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorksheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workbook is UNSET: - errors.append("workbook is required for creation") - if self.workbook_qualified_name is UNSET: - errors.append("workbook_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorksheet validation failed: {errors}") - - def minimize(self) -> "TableauWorksheet": - """ - Return a minimal copy of this TableauWorksheet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorksheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorksheet instance with only the minimum required fields. - """ - self.validate() - return TableauWorksheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorksheet": - """ - Create a :class:`RelatedTableauWorksheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorksheet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauWorksheet(guid=self.guid) - return RelatedTableauWorksheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -640,9 +569,6 @@ def _tableau_worksheet_to_nested( is_incomplete=tableau_worksheet.is_incomplete, provenance_type=tableau_worksheet.provenance_type, home_id=tableau_worksheet.home_id, - depth=tableau_worksheet.depth, - immediate_upstream=tableau_worksheet.immediate_upstream, - immediate_downstream=tableau_worksheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -676,6 +602,7 @@ def _tableau_worksheet_from_nested(nested: TableauWorksheetNested) -> TableauWor updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -684,9 +611,6 @@ def _tableau_worksheet_from_nested(nested: TableauWorksheetNested) -> TableauWor is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_worksheet_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tableau_worksheet_field.py b/pyatlan_v9/model/assets/tableau_worksheet_field.py index e3d78d591..fb13dd56b 100644 --- a/pyatlan_v9/model/assets/tableau_worksheet_field.py +++ b/pyatlan_v9/model/assets/tableau_worksheet_field.py @@ -55,7 +55,6 @@ RelatedTableauDashboardField, RelatedTableauDatasourceField, RelatedTableauWorksheet, - RelatedTableauWorksheetField, ) # 
============================================================================= @@ -119,6 +118,8 @@ class TableauWorksheetField(Asset): TABLEAU_CALCULATED_FIELD: ClassVar[Any] = None TABLEAU_WORKSHEET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorksheetField" + tableau_site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this worksheet field exists.""" @@ -297,84 +298,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorksheetField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.tableau_worksheet is UNSET: - errors.append("tableau_worksheet is required for creation") - if self.tableau_worksheet_qualified_name is UNSET: - errors.append( - "tableau_worksheet_qualified_name is required for creation" - ) - if self.tableau_workbook_qualified_name is UNSET: - errors.append( - "tableau_workbook_qualified_name is required for creation" - ) - if self.tableau_project_qualified_name is UNSET: - errors.append("tableau_project_qualified_name is required for creation") - if self.tableau_site_qualified_name is UNSET: - errors.append("tableau_site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorksheetField validation failed: {errors}") - - def minimize(self) -> "TableauWorksheetField": - """ - Return a minimal copy of this TableauWorksheetField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorksheetField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorksheetField instance with only the minimum required fields. 
- """ - self.validate() - return TableauWorksheetField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorksheetField": - """ - Create a :class:`RelatedTableauWorksheetField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorksheetField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauWorksheetField(guid=self.guid) - return RelatedTableauWorksheetField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -772,9 +695,6 @@ def _tableau_worksheet_field_to_nested( is_incomplete=tableau_worksheet_field.is_incomplete, provenance_type=tableau_worksheet_field.provenance_type, home_id=tableau_worksheet_field.home_id, - depth=tableau_worksheet_field.depth, - immediate_upstream=tableau_worksheet_field.immediate_upstream, - immediate_downstream=tableau_worksheet_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -810,6 +730,7 @@ def _tableau_worksheet_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -818,9 +739,6 @@ def _tableau_worksheet_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_worksheet_field_attrs(attrs), # 
Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/tag.py b/pyatlan_v9/model/assets/tag.py index a9ed9e499..9ecbc5af6 100644 --- a/pyatlan_v9/model/assets/tag.py +++ b/pyatlan_v9/model/assets/tag.py @@ -49,7 +49,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tag_related import RelatedTag # ============================================================================= # FLAT ASSET CLASS @@ -95,6 +94,8 @@ class Tag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Tag" + tag_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the tag in the source system.""" @@ -202,73 +203,6 @@ class Tag(Asset): def __post_init__(self) -> None: self.type_name = "Tag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Tag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"Tag validation failed: {errors}") - - def minimize(self) -> "Tag": - """ - Return a minimal copy of this Tag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Tag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Tag instance with only the minimum required fields. - """ - self.validate() - return Tag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTag": - """ - Create a :class:`RelatedTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTag(guid=self.guid) - return RelatedTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -531,9 +465,6 @@ def _tag_to_nested(tag: Tag) -> TagNested: is_incomplete=tag.is_incomplete, provenance_type=tag.provenance_type, home_id=tag.home_id, - depth=tag.depth, - immediate_upstream=tag.immediate_upstream, - immediate_downstream=tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -563,6 +494,7 @@ def _tag_from_nested(nested: TagNested) -> Tag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -571,9 +503,6 @@ def _tag_from_nested(nested: TagNested) -> Tag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tag_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/task.py b/pyatlan_v9/model/assets/task.py index 952aa4d21..987fb5868 100644 --- a/pyatlan_v9/model/assets/task.py +++ b/pyatlan_v9/model/assets/task.py @@ -44,7 +44,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .task_related import RelatedTask # ============================================================================= # FLAT ASSET CLASS @@ -90,6 +89,8 @@ class Task(Asset): SCHEMA_REGISTRY_SUBJECTS: 
ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Task" + task_recipient: Union[str, None, UnsetType] = UNSET """Recipient of the task.""" @@ -195,66 +196,6 @@ class Task(Asset): def __post_init__(self) -> None: self.type_name = "Task" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Task instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Task validation failed: {errors}") - - def minimize(self) -> "Task": - """ - Return a minimal copy of this Task with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Task with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Task instance with only the minimum required fields. 
- """ - self.validate() - return Task(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTask": - """ - Create a :class:`RelatedTask` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTask reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTask(guid=self.guid) - return RelatedTask(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -525,9 +466,6 @@ def _task_to_nested(task: Task) -> TaskNested: is_incomplete=task.is_incomplete, provenance_type=task.provenance_type, home_id=task.home_id, - depth=task.depth, - immediate_upstream=task.immediate_upstream, - immediate_downstream=task.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -557,6 +495,7 @@ def _task_from_nested(nested: TaskNested) -> Task: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -565,9 +504,6 @@ def _task_from_nested(nested: TaskNested) -> Task: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_task_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot.py b/pyatlan_v9/model/assets/thoughtspot.py index 2db8ae57b..f4a5f6d49 100644 --- 
a/pyatlan_v9/model/assets/thoughtspot.py +++ b/pyatlan_v9/model/assets/thoughtspot.py @@ -49,7 +49,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspot # ============================================================================= # FLAT ASSET CLASS @@ -95,6 +94,8 @@ class Thoughtspot(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Thoughtspot" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -202,66 +203,6 @@ class Thoughtspot(Asset): def __post_init__(self) -> None: self.type_name = "Thoughtspot" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Thoughtspot instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Thoughtspot validation failed: {errors}") - - def minimize(self) -> "Thoughtspot": - """ - Return a minimal copy of this Thoughtspot with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Thoughtspot with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Thoughtspot instance with only the minimum required fields. - """ - self.validate() - return Thoughtspot(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspot": - """ - Create a :class:`RelatedThoughtspot` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspot reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspot(guid=self.guid) - return RelatedThoughtspot(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -528,9 +469,6 @@ def _thoughtspot_to_nested(thoughtspot: Thoughtspot) -> ThoughtspotNested: is_incomplete=thoughtspot.is_incomplete, provenance_type=thoughtspot.provenance_type, home_id=thoughtspot.home_id, - depth=thoughtspot.depth, - immediate_upstream=thoughtspot.immediate_upstream, - immediate_downstream=thoughtspot.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -562,6 +500,7 @@ def _thoughtspot_from_nested(nested: ThoughtspotNested) -> Thoughtspot: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -570,9 +509,6 @@ def _thoughtspot_from_nested(nested: ThoughtspotNested) -> Thoughtspot: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_answer.py b/pyatlan_v9/model/assets/thoughtspot_answer.py index a2d4ac1ae..c0a1cbc15 100644 --- a/pyatlan_v9/model/assets/thoughtspot_answer.py +++ b/pyatlan_v9/model/assets/thoughtspot_answer.py @@ -49,7 +49,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from 
.thoughtspot_related import RelatedThoughtspotAnswer # ============================================================================= # FLAT ASSET CLASS @@ -95,6 +94,8 @@ class ThoughtspotAnswer(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotAnswer" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -202,66 +203,6 @@ class ThoughtspotAnswer(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotAnswer" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotAnswer instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotAnswer validation failed: {errors}") - - def minimize(self) -> "ThoughtspotAnswer": - """ - Return a minimal copy of this ThoughtspotAnswer with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotAnswer with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotAnswer instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotAnswer(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotAnswer": - """ - Create a :class:`RelatedThoughtspotAnswer` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotAnswer reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotAnswer(guid=self.guid) - return RelatedThoughtspotAnswer(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -538,9 +479,6 @@ def _thoughtspot_answer_to_nested( is_incomplete=thoughtspot_answer.is_incomplete, provenance_type=thoughtspot_answer.provenance_type, home_id=thoughtspot_answer.home_id, - depth=thoughtspot_answer.depth, - immediate_upstream=thoughtspot_answer.immediate_upstream, - immediate_downstream=thoughtspot_answer.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -576,6 +514,7 @@ def _thoughtspot_answer_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -584,9 +523,6 @@ def _thoughtspot_answer_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_answer_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_column.py b/pyatlan_v9/model/assets/thoughtspot_column.py index 281b71114..602cdaa11 100644 --- a/pyatlan_v9/model/assets/thoughtspot_column.py +++ b/pyatlan_v9/model/assets/thoughtspot_column.py @@ -51,7 +51,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .thoughtspot_related import ( - RelatedThoughtspotColumn, RelatedThoughtspotTable, RelatedThoughtspotView, RelatedThoughtspotWorksheet, @@ -71,8 +70,8 @@ class ThoughtspotColumn(Asset): THOUGHTSPOT_TABLE_QUALIFIED_NAME: ClassVar[Any] = None THOUGHTSPOT_VIEW_QUALIFIED_NAME: ClassVar[Any] = None THOUGHTSPOT_WORKSHEET_QUALIFIED_NAME: ClassVar[Any] = None - THOUGHTSPOT_COLUMN_DATA_TYPE: ClassVar[Any] = None - THOUGHTSPOT_COLUMN_TYPE: ClassVar[Any] = None + THOUGHTSPOT_DATA_TYPE: ClassVar[Any] = None + THOUGHTSPOT_TYPE: ClassVar[Any] = None THOUGHTSPOT_CHART_TYPE: ClassVar[Any] = None THOUGHTSPOT_QUESTION_TEXT: ClassVar[Any] = None THOUGHTSPOT_JOIN_COUNT: ClassVar[Any] = None @@ -109,6 +108,8 @@ class ThoughtspotColumn(Asset): THOUGHTSPOT_VIEW: ClassVar[Any] = None THOUGHTSPOT_WORKSHEET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotColumn" + thoughtspot_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the table in which this column exists.""" @@ -118,10 +119,10 @@ class ThoughtspotColumn(Asset): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data 
stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET @@ -246,76 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.thoughtspot_table is UNSET: - errors.append("thoughtspot_table is required for creation") - if self.thoughtspot_table_qualified_name is UNSET: - errors.append( - "thoughtspot_table_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"ThoughtspotColumn validation failed: {errors}") - - def minimize(self) -> "ThoughtspotColumn": - """ - Return a minimal copy of this ThoughtspotColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotColumn instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotColumn": - """ - Create a :class:`RelatedThoughtspotColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotColumn(guid=self.guid) - return RelatedThoughtspotColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -382,10 +313,10 @@ class ThoughtspotColumnAttributes(AssetAttributes): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET @@ -571,8 +502,8 @@ def _populate_thoughtspot_column_attrs( attrs.thoughtspot_worksheet_qualified_name = ( obj.thoughtspot_worksheet_qualified_name ) - attrs.thoughtspot_column_data_type = obj.thoughtspot_column_data_type - attrs.thoughtspot_column_type = obj.thoughtspot_column_type + attrs.thoughtspot_data_type = obj.thoughtspot_data_type + attrs.thoughtspot_type = obj.thoughtspot_type attrs.thoughtspot_chart_type = obj.thoughtspot_chart_type attrs.thoughtspot_question_text = obj.thoughtspot_question_text attrs.thoughtspot_join_count = obj.thoughtspot_join_count @@ -587,8 +518,8 @@ def _extract_thoughtspot_column_attrs(attrs: ThoughtspotColumnAttributes) -> dic result["thoughtspot_worksheet_qualified_name"] = ( attrs.thoughtspot_worksheet_qualified_name ) - result["thoughtspot_column_data_type"] = attrs.thoughtspot_column_data_type - 
result["thoughtspot_column_type"] = attrs.thoughtspot_column_type + result["thoughtspot_data_type"] = attrs.thoughtspot_data_type + result["thoughtspot_type"] = attrs.thoughtspot_type result["thoughtspot_chart_type"] = attrs.thoughtspot_chart_type result["thoughtspot_question_text"] = attrs.thoughtspot_question_text result["thoughtspot_join_count"] = attrs.thoughtspot_join_count @@ -633,9 +564,6 @@ def _thoughtspot_column_to_nested( is_incomplete=thoughtspot_column.is_incomplete, provenance_type=thoughtspot_column.provenance_type, home_id=thoughtspot_column.home_id, - depth=thoughtspot_column.depth, - immediate_upstream=thoughtspot_column.immediate_upstream, - immediate_downstream=thoughtspot_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -671,6 +599,7 @@ def _thoughtspot_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -679,9 +608,6 @@ def _thoughtspot_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -728,12 +654,10 @@ def _thoughtspot_column_from_nested_bytes( "thoughtspotWorksheetQualifiedName", "thoughtspotWorksheetQualifiedName.text", ) -ThoughtspotColumn.THOUGHTSPOT_COLUMN_DATA_TYPE = KeywordField( - "thoughtspotColumnDataType", "thoughtspotColumnDataType" -) -ThoughtspotColumn.THOUGHTSPOT_COLUMN_TYPE = KeywordField( - "thoughtspotColumnType", "thoughtspotColumnType" +ThoughtspotColumn.THOUGHTSPOT_DATA_TYPE = KeywordField( + "thoughtspotDataType", 
"thoughtspotDataType" ) +ThoughtspotColumn.THOUGHTSPOT_TYPE = KeywordField("thoughtspotType", "thoughtspotType") ThoughtspotColumn.THOUGHTSPOT_CHART_TYPE = KeywordField( "thoughtspotChartType", "thoughtspotChartType" ) diff --git a/pyatlan_v9/model/assets/thoughtspot_dashlet.py b/pyatlan_v9/model/assets/thoughtspot_dashlet.py index 2cce5db23..1bdba4743 100644 --- a/pyatlan_v9/model/assets/thoughtspot_dashlet.py +++ b/pyatlan_v9/model/assets/thoughtspot_dashlet.py @@ -50,7 +50,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotDashlet, RelatedThoughtspotLiveboard +from .thoughtspot_related import RelatedThoughtspotLiveboard # ============================================================================= # FLAT ASSET CLASS @@ -99,6 +99,8 @@ class ThoughtspotDashlet(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_LIVEBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotDashlet" + thoughtspot_liveboard_name: Union[str, None, UnsetType] = UNSET """Simple name of the liveboard in which this dashlet exists.""" @@ -221,78 +223,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotDashlet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.thoughtspot_liveboard is UNSET: - errors.append("thoughtspot_liveboard is required for creation") - if self.thoughtspot_liveboard_name is UNSET: - errors.append("thoughtspot_liveboard_name is required for creation") - if self.thoughtspot_liveboard_qualified_name is UNSET: - errors.append( - "thoughtspot_liveboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"ThoughtspotDashlet validation failed: {errors}") - - def minimize(self) -> "ThoughtspotDashlet": - """ - Return a minimal copy of this ThoughtspotDashlet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotDashlet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotDashlet instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotDashlet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotDashlet": - """ - Create a :class:`RelatedThoughtspotDashlet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotDashlet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotDashlet(guid=self.guid) - return RelatedThoughtspotDashlet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -587,9 +517,6 @@ def _thoughtspot_dashlet_to_nested( is_incomplete=thoughtspot_dashlet.is_incomplete, provenance_type=thoughtspot_dashlet.provenance_type, home_id=thoughtspot_dashlet.home_id, - depth=thoughtspot_dashlet.depth, - immediate_upstream=thoughtspot_dashlet.immediate_upstream, - immediate_downstream=thoughtspot_dashlet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -625,6 +552,7 @@ def _thoughtspot_dashlet_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -633,9 +561,6 @@ def _thoughtspot_dashlet_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_dashlet_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_liveboard.py b/pyatlan_v9/model/assets/thoughtspot_liveboard.py index b9127658f..0f26bdbc2 100644 --- a/pyatlan_v9/model/assets/thoughtspot_liveboard.py +++ b/pyatlan_v9/model/assets/thoughtspot_liveboard.py @@ -49,7 +49,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import 
RelatedThoughtspotDashlet, RelatedThoughtspotLiveboard +from .thoughtspot_related import RelatedThoughtspotDashlet # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +96,8 @@ class ThoughtspotLiveboard(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_DASHLETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotLiveboard" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -208,66 +210,6 @@ class ThoughtspotLiveboard(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotLiveboard" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotLiveboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotLiveboard validation failed: {errors}") - - def minimize(self) -> "ThoughtspotLiveboard": - """ - Return a minimal copy of this ThoughtspotLiveboard with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotLiveboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotLiveboard instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotLiveboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotLiveboard": - """ - Create a :class:`RelatedThoughtspotLiveboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotLiveboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotLiveboard(guid=self.guid) - return RelatedThoughtspotLiveboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -550,9 +492,6 @@ def _thoughtspot_liveboard_to_nested( is_incomplete=thoughtspot_liveboard.is_incomplete, provenance_type=thoughtspot_liveboard.provenance_type, home_id=thoughtspot_liveboard.home_id, - depth=thoughtspot_liveboard.depth, - immediate_upstream=thoughtspot_liveboard.immediate_upstream, - immediate_downstream=thoughtspot_liveboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -588,6 +527,7 @@ def _thoughtspot_liveboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -596,9 +536,6 @@ def 
_thoughtspot_liveboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_liveboard_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_related.py b/pyatlan_v9/model/assets/thoughtspot_related.py index aacb3b26e..c48b6705e 100644 --- a/pyatlan_v9/model/assets/thoughtspot_related.py +++ b/pyatlan_v9/model/assets/thoughtspot_related.py @@ -172,10 +172,10 @@ class RelatedThoughtspotColumn(RelatedThoughtspot): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/thoughtspot_table.py b/pyatlan_v9/model/assets/thoughtspot_table.py index 1af9e320d..968b4e0ba 100644 --- a/pyatlan_v9/model/assets/thoughtspot_table.py +++ b/pyatlan_v9/model/assets/thoughtspot_table.py @@ -49,7 +49,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotTable +from .thoughtspot_related import RelatedThoughtspotColumn # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +96,8 @@ 
class ThoughtspotTable(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotTable" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -206,66 +208,6 @@ class ThoughtspotTable(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotTable validation failed: {errors}") - - def minimize(self) -> "ThoughtspotTable": - """ - Return a minimal copy of this ThoughtspotTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotTable instance with only the minimum required fields. 
- """ - self.validate() - return ThoughtspotTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotTable": - """ - Create a :class:`RelatedThoughtspotTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotTable(guid=self.guid) - return RelatedThoughtspotTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -546,9 +488,6 @@ def _thoughtspot_table_to_nested( is_incomplete=thoughtspot_table.is_incomplete, provenance_type=thoughtspot_table.provenance_type, home_id=thoughtspot_table.home_id, - depth=thoughtspot_table.depth, - immediate_upstream=thoughtspot_table.immediate_upstream, - immediate_downstream=thoughtspot_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -582,6 +521,7 @@ def _thoughtspot_table_from_nested(nested: ThoughtspotTableNested) -> Thoughtspo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -590,9 +530,6 @@ def _thoughtspot_table_from_nested(nested: ThoughtspotTableNested) -> Thoughtspo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_table_attrs(attrs), # 
Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_view.py b/pyatlan_v9/model/assets/thoughtspot_view.py index 31e6b7daf..91503d325 100644 --- a/pyatlan_v9/model/assets/thoughtspot_view.py +++ b/pyatlan_v9/model/assets/thoughtspot_view.py @@ -49,7 +49,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotView +from .thoughtspot_related import RelatedThoughtspotColumn # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +96,8 @@ class ThoughtspotView(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotView" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -206,66 +208,6 @@ class ThoughtspotView(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotView validation failed: {errors}") - - def minimize(self) -> "ThoughtspotView": - """ - Return a minimal copy of this ThoughtspotView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotView instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotView": - """ - Create a :class:`RelatedThoughtspotView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotView(guid=self.guid) - return RelatedThoughtspotView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -546,9 +488,6 @@ def _thoughtspot_view_to_nested( is_incomplete=thoughtspot_view.is_incomplete, provenance_type=thoughtspot_view.provenance_type, home_id=thoughtspot_view.home_id, - depth=thoughtspot_view.depth, - immediate_upstream=thoughtspot_view.immediate_upstream, - immediate_downstream=thoughtspot_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -582,6 +521,7 @@ def _thoughtspot_view_from_nested(nested: ThoughtspotViewNested) -> ThoughtspotV updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -590,9 +530,6 @@ def _thoughtspot_view_from_nested(nested: ThoughtspotViewNested) -> ThoughtspotV is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/thoughtspot_worksheet.py b/pyatlan_v9/model/assets/thoughtspot_worksheet.py index de8ceaded..c7bbcb258 100644 --- a/pyatlan_v9/model/assets/thoughtspot_worksheet.py +++ b/pyatlan_v9/model/assets/thoughtspot_worksheet.py @@ -49,7 +49,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import 
RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotWorksheet +from .thoughtspot_related import RelatedThoughtspotColumn # ============================================================================= # FLAT ASSET CLASS @@ -96,6 +96,8 @@ class ThoughtspotWorksheet(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotWorksheet" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -206,66 +208,6 @@ class ThoughtspotWorksheet(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotWorksheet" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotWorksheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotWorksheet validation failed: {errors}") - - def minimize(self) -> "ThoughtspotWorksheet": - """ - Return a minimal copy of this ThoughtspotWorksheet with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotWorksheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotWorksheet instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotWorksheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotWorksheet": - """ - Create a :class:`RelatedThoughtspotWorksheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotWorksheet reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotWorksheet(guid=self.guid) - return RelatedThoughtspotWorksheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -546,9 +488,6 @@ def _thoughtspot_worksheet_to_nested( is_incomplete=thoughtspot_worksheet.is_incomplete, provenance_type=thoughtspot_worksheet.provenance_type, home_id=thoughtspot_worksheet.home_id, - depth=thoughtspot_worksheet.depth, - immediate_upstream=thoughtspot_worksheet.immediate_upstream, - immediate_downstream=thoughtspot_worksheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -584,6 +523,7 @@ def _thoughtspot_worksheet_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -592,9 +532,6 @@ def 
_thoughtspot_worksheet_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_worksheet_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/view.py b/pyatlan_v9/model/assets/view.py index 570b58c37..48330cb00 100644 --- a/pyatlan_v9/model/assets/view.py +++ b/pyatlan_v9/model/assets/view.py @@ -59,7 +59,7 @@ from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_related import RelatedColumn, RelatedQuery, RelatedSchema, RelatedView +from .sql_related import RelatedColumn, RelatedQuery, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -137,6 +137,8 @@ class View(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "View" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this view.""" @@ -354,80 +356,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this View instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"View validation failed: {errors}") - - def minimize(self) -> "View": - """ - Return a minimal copy of this View with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new View with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new View instance with only the minimum required fields. - """ - self.validate() - return View(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedView": - """ - Create a :class:`RelatedView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedView(guid=self.guid) - return RelatedView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -948,9 +876,6 @@ def _view_to_nested(view: View) -> ViewNested: is_incomplete=view.is_incomplete, provenance_type=view.provenance_type, home_id=view.home_id, - depth=view.depth, - immediate_upstream=view.immediate_upstream, - immediate_downstream=view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -980,6 +905,7 @@ def _view_from_nested(nested: ViewNested) -> View: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -988,9 +914,6 @@ def _view_from_nested(nested: ViewNested) -> View: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_view_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/workflow.py b/pyatlan_v9/model/assets/workflow.py index e5732f174..4b8602f8f 100644 --- a/pyatlan_v9/model/assets/workflow.py +++ b/pyatlan_v9/model/assets/workflow.py @@ -44,7 +44,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .workflow_related import RelatedWorkflow # ============================================================================= # FLAT ASSET CLASS @@ -85,6 +84,8 @@ class Workflow(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Workflow" + workflow_template_guid: 
Union[str, None, UnsetType] = UNSET """GUID of the workflow template from which this workflow was created.""" @@ -175,66 +176,6 @@ class Workflow(Asset): def __post_init__(self) -> None: self.type_name = "Workflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Workflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Workflow validation failed: {errors}") - - def minimize(self) -> "Workflow": - """ - Return a minimal copy of this Workflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Workflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Workflow instance with only the minimum required fields. - """ - self.validate() - return Workflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedWorkflow": - """ - Create a :class:`RelatedWorkflow` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedWorkflow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedWorkflow(guid=self.guid) - return RelatedWorkflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -484,9 +425,6 @@ def _workflow_to_nested(workflow: Workflow) -> WorkflowNested: is_incomplete=workflow.is_incomplete, provenance_type=workflow.provenance_type, home_id=workflow.home_id, - depth=workflow.depth, - immediate_upstream=workflow.immediate_upstream, - immediate_downstream=workflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -518,6 +456,7 @@ def _workflow_from_nested(nested: WorkflowNested) -> Workflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -526,9 +465,6 @@ def _workflow_from_nested(nested: WorkflowNested) -> Workflow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_workflow_attrs(attrs), # Merged relationship attributes **merged_rels, diff --git a/pyatlan_v9/model/assets/workflow_related.py b/pyatlan_v9/model/assets/workflow_related.py index ecedda859..8f83b7970 100644 --- a/pyatlan_v9/model/assets/workflow_related.py +++ b/pyatlan_v9/model/assets/workflow_related.py @@ -76,16 +76,16 @@ class 
RelatedWorkflowRun(RelatedWorkflow): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "WorkflowRun" so it serializes correctly - workflow_run_workflow_guid: Union[str, None, UnsetType] = UNSET + workflow_workflow_guid: Union[str, None, UnsetType] = UNSET """GUID of the workflow from which this run was created.""" - workflow_run_type: Union[str, None, UnsetType] = UNSET + workflow_type: Union[str, None, UnsetType] = UNSET """Type of the workflow from which this run was created.""" - workflow_run_action_choices: Union[List[str], None, UnsetType] = UNSET + workflow_action_choices: Union[List[str], None, UnsetType] = UNSET """List of workflow run action choices.""" - workflow_run_on_asset_guid: Union[str, None, UnsetType] = UNSET + workflow_on_asset_guid: Union[str, None, UnsetType] = UNSET """The asset for which this run was created.""" workflow_run_comment: Union[str, None, UnsetType] = UNSET @@ -94,19 +94,19 @@ class RelatedWorkflowRun(RelatedWorkflow): workflow_run_config: Union[str, None, UnsetType] = UNSET """Details of the approval workflow run.""" - workflow_run_status: Union[str, None, UnsetType] = UNSET + workflow_status: Union[str, None, UnsetType] = UNSET """Status of the run.""" - workflow_run_expires_at: Union[int, None, UnsetType] = UNSET + workflow_expires_at: Union[int, None, UnsetType] = UNSET """Time at which this run will expire.""" - workflow_run_created_by: Union[str, None, UnsetType] = UNSET + workflow_created_by: Union[str, None, UnsetType] = UNSET """Username of the user who created this workflow run.""" - workflow_run_updated_by: Union[str, None, UnsetType] = UNSET + workflow_updated_by: Union[str, None, UnsetType] = UNSET """Username of the user who updated this workflow run.""" - workflow_run_deleted_at: Union[int, None, UnsetType] = UNSET + workflow_deleted_at: Union[int, None, UnsetType] = UNSET """Deletion time of this workflow run.""" def __post_init__(self) -> None: diff --git a/requirements.txt 
b/requirements.txt index 4874f45c3..e69de29bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,274 +0,0 @@ -# This file was autogenerated by uv via the following command: -# uv export --all-extras --no-hashes --e . -annotated-types==0.7.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -anyio==4.12.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpx -authlib==1.6.9 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -backports-asyncio-runner==1.2.0 ; python_full_version < '3.11' and platform_python_implementation == 'CPython' - # via pytest-asyncio -backports-tarfile==1.2.0 ; python_full_version < '3.12' and platform_python_implementation == 'CPython' - # via jaraco-context -certifi==2026.2.25 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # httpcore - # httpx - # requests -cffi==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via cryptography -cfgv==3.4.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pre-commit -cfgv==3.5.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -charset-normalizer==3.4.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via requests -colorama==0.4.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' and sys_platform == 'win32' - # via pytest -coverage==7.10.7 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pytest-cov -coverage==7.13.4 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pytest-cov -cryptography==46.0.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # authlib - # 
secretstorage - # types-authlib -deepdiff==8.6.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -distlib==0.4.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via virtualenv -docutils==0.21.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via readme-renderer -exceptiongroup==1.3.1 ; python_full_version < '3.11' and platform_python_implementation == 'CPython' - # via - # anyio - # pytest -filelock==3.19.1 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -filelock==3.25.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -h11==0.16.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpcore -httpcore==1.0.9 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpx -httpx==0.28.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # httpx-retries - # pyatlan -httpx-retries==0.4.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -id==1.5.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via twine -id==1.6.1 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -identify==2.6.15 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pre-commit -identify==2.6.17 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -idna==3.11 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # anyio - # httpx - # requests - # yarl -importlib-metadata==8.7.1 ; (python_full_version < '3.12' and 
platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 'ppc64le' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 's390x' and platform_python_implementation == 'CPython') - # via - # keyring - # twine -iniconfig==2.1.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pytest -iniconfig==2.3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pytest -jaraco-classes==3.4.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via keyring -jaraco-context==6.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via keyring -jaraco-functools==4.4.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via keyring -jeepney==0.9.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via - # keyring - # secretstorage -jinja2==3.1.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -keyring==25.7.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via twine -lazy-loader==0.4 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -librt==0.8.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -markdown-it-py==3.0.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via rich -markdown-it-py==4.0.0 ; 
python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via rich -markupsafe==3.0.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via jinja2 -mdurl==0.1.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via markdown-it-py -more-itertools==10.8.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via - # jaraco-classes - # jaraco-functools -msgspec==0.20.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -multidict==6.7.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via yarl -mypy==1.19.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -mypy-extensions==1.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -nanoid==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -networkx==3.2.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via networkx-stubs -networkx-stubs==0.0.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -nh3==0.3.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via readme-renderer -nodeenv==1.10.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -orderly-set==5.5.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via deepdiff -packaging==26.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # lazy-loader - # pytest - # twine -pathspec==1.0.4 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -platformdirs==4.4.0 ; python_full_version < '3.10' and 
platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -platformdirs==4.9.4 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -pluggy==1.6.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest - # pytest-cov -pre-commit==4.3.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -propcache==0.4.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via yarl -pycparser==2.23 ; python_full_version < '3.10' and implementation_name != 'PyPy' and platform_python_implementation == 'CPython' - # via cffi -pycparser==3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and implementation_name != 'PyPy' and platform_python_implementation == 'CPython' - # via cffi -pydantic==2.12.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -pydantic-core==2.41.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -pygments==2.19.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest - # readme-renderer - # rich -pytest==8.4.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest-asyncio - # pytest-cov - # pytest-order - # pytest-sugar - # pytest-timer - # pytest-vcr -pytest-asyncio==1.2.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-cov==7.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-order==1.3.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-sugar==1.1.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-timer==1.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' 
-pytest-vcr==1.0.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -python-dateutil==2.9.0.post0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -python-discovery==1.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via virtualenv -pytz==2026.1.post1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -pywin32-ctypes==0.2.3 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'win32' - # via keyring -pyyaml==6.0.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pre-commit - # pyatlan - # vcrpy -readme-renderer==44.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -requests==2.32.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # id - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -rfc3986==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -rich==14.3.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -ruff==0.15.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -secretstorage==3.3.3 ; python_full_version < '3.10' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via keyring -secretstorage==3.5.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via keyring -six==1.17.0 ; python_full_version < '3.14' 
and platform_python_implementation == 'CPython' - # via python-dateutil -tenacity==9.1.2 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pyatlan -tenacity==9.1.4 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -termcolor==3.1.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # pytest-sugar - # pytest-timer -termcolor==3.3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest-sugar - # pytest-timer -tomli==2.4.0 ; python_full_version <= '3.11' and platform_python_implementation == 'CPython' - # via - # coverage - # mypy - # pytest -twine==6.2.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-authlib==1.6.7.20260208 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-retry==0.9.9.20250322 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-setuptools==81.0.0.20260209 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -typing-extensions==4.15.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # anyio - # cryptography - # exceptiongroup - # multidict - # mypy - # pydantic - # pydantic-core - # pytest-asyncio - # typing-inspection - # virtualenv -typing-inspection==0.4.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -urllib3==1.26.20 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # requests - # twine - # vcrpy -urllib3==2.6.3 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # id - # requests - # twine - # vcrpy -vcrpy==7.0.0 ; python_full_version < '3.14' and platform_python_implementation == 
'CPython' - # via pytest-vcr -virtualenv==21.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -wrapt==2.1.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via vcrpy -yarl==1.22.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via vcrpy -yarl==1.23.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via vcrpy -zipp==3.23.0 ; (python_full_version < '3.12' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 'ppc64le' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 's390x' and platform_python_implementation == 'CPython') - # via importlib-metadata