Skip to content

Commit eb261c3

Browse files
committed
delete commented out code
1 parent 4b09365 commit eb261c3

File tree

2 files changed

+1
-110
lines changed

2 files changed

+1
-110
lines changed

metadata-ingestion/src/datahub/ingestion/source/vertexai.py

-108
Original file line numberDiff line numberDiff line change
@@ -330,60 +330,6 @@ def _make_ml_model_group_urn(self, model: Model) -> str:
330330
)
331331
return urn
332332

333-
# def _gen_data_process_workunits(
334-
# self, job: VertexAiResourceNoun
335-
# ) -> Iterable[MetadataWorkUnit]:
336-
# """
337-
# Generate a work unit for VertexAI Training Job
338-
# """
339-
#
340-
# created_time = (
341-
# int(job.create_time.timestamp() * 1000)
342-
# if job.create_time
343-
# else int(time.time() * 1000)
344-
# )
345-
# created_actor = f"urn:li:platformResource:{self.platform}"
346-
#
347-
# job_id = self._make_vertexai_job_name(entity_id=job.name)
348-
# job_urn = builder.make_data_process_instance_urn(job_id)
349-
#
350-
# aspects: List[_Aspect] = list()
351-
# aspects.append(
352-
# DataProcessInstancePropertiesClass(
353-
# name=job_id,
354-
# created=AuditStampClass(
355-
# time=created_time,
356-
# actor=created_actor,
357-
# ),
358-
# externalUrl=self._make_job_external_url(job),
359-
# customProperties={
360-
# "displayName": job.display_name,
361-
# "jobType": job.__class__.__name__,
362-
# },
363-
# )
364-
# )
365-
#
366-
# aspects.append(
367-
# MLTrainingRunProperties(
368-
# externalUrl=self._make_job_external_url(job), id=job.name
369-
# )
370-
# )
371-
# aspects.append(SubTypesClass(typeNames=[MLTypes.TRAINING_JOB]))
372-
#
373-
# aspects.append(ContainerClass(container=self._get_project_container().as_urn()))
374-
#
375-
# # TODO add status of the job
376-
# # aspects.append(
377-
# # DataProcessInstanceRunEventClass(
378-
# # status=DataProcessRunStatusClass.COMPLETE,
379-
# # timestampMillis=0
380-
# # )
381-
# # }
382-
#
383-
# yield from auto_workunit(
384-
# MetadataChangeProposalWrapper.construct_many(job_urn, aspects=aspects)
385-
# )
386-
387333
def _get_project_container(self) -> ProjectIdKey:
388334
return ProjectIdKey(project_id=self.config.project_id, platform=self.platform)
389335

@@ -404,36 +350,6 @@ def _search_model_version(
404350
return version
405351
return None
406352

407-
# def _get_job_output_workunits(
408-
# self, job: VertexAiResourceNoun
409-
# ) -> Iterable[MetadataWorkUnit]:
410-
# """
411-
# This method creates work units that link the training job to the model version
412-
# that it produces. It checks if the job configuration contains a model to upload,
413-
# and if so, it generates a work unit for the model version with the training job
414-
# as part of its properties.
415-
# """
416-
#
417-
# job_conf = job.to_dict()
418-
# if (
419-
# "modelToUpload" in job_conf
420-
# and "name" in job_conf["modelToUpload"]
421-
# and job_conf["modelToUpload"]["name"]
422-
# ):
423-
# model_version_str = job_conf["modelToUpload"]["versionId"]
424-
# job_urn = self._make_job_urn(job)
425-
#
426-
# model = Model(model_name=job_conf["modelToUpload"]["name"])
427-
# model_version = self._search_model_version(model, model_version_str)
428-
# if model and model_version:
429-
# logger.info(
430-
# f"Found output model (name:{model.display_name} id:{model_version_str}) "
431-
# f"for training job: {job.display_name}"
432-
# )
433-
# yield from self._gen_ml_model_endpoint_workunits(
434-
# model, model_version, job_urn
435-
# )
436-
437353
def _search_dataset(self, dataset_id: str) -> Optional[VertexAiResourceNoun]:
438354
"""
439355
Search for a dataset by its ID in Vertex AI.
@@ -491,30 +407,6 @@ def _get_dataset_workunits(
491407
MetadataChangeProposalWrapper.construct_many(dataset_urn, aspects=aspects)
492408
)
493409

494-
# def _get_job_input_workunits(
495-
# self, job: VertexAiResourceNoun
496-
# ) -> Iterable[MetadataWorkUnit]:
497-
# """
498-
# Generate work units for the input data of a training job.
499-
# This method checks if the training job is an AutoML job and if it has an input dataset
500-
# configuration. If so, it creates a work unit for the input dataset.
501-
# """
502-
#
503-
# if self._is_automl_job(job):
504-
# job_conf = job.to_dict()
505-
# if (
506-
# "inputDataConfig" in job_conf
507-
# and "datasetId" in job_conf["inputDataConfig"]
508-
# ):
509-
# # Create URN of Input Dataset for Training Job
510-
# dataset_id = job_conf["inputDataConfig"]["datasetId"]
511-
# logger.info(
512-
# f"Found input dataset (id: {dataset_id}) for training job ({job.display_name})"
513-
# )
514-
#
515-
# if dataset_id:
516-
# yield from self._gen_input_dataset_workunits(job, dataset_id)
517-
518410
def _get_training_job_metadata(
519411
self, job: VertexAiResourceNoun
520412
) -> TrainingJobMetadata:

metadata-ingestion/tests/unit/test_vertexai_source.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,9 @@
55
from unittest.mock import MagicMock, patch
66

77
import pytest
8-
from google.cloud import aiplatform
98
from google.cloud.aiplatform import AutoMLTabularTrainingJob
109
from google.cloud.aiplatform.base import VertexAiResourceNoun
1110
from google.cloud.aiplatform.models import Endpoint, Model, VersionInfo
12-
from google.cloud.aiplatform.training_jobs import _TrainingJob
1311
from google.protobuf import timestamp_pb2
1412

1513
import datahub.emitter.mce_builder as builder
@@ -126,6 +124,7 @@ def source() -> VertexAISource:
126124
config=VertexAIConfig(project_id=PROJECT_ID, region=REGION),
127125
)
128126

127+
129128
@patch("google.cloud.aiplatform.Model.list")
130129
def test_get_ml_model_workunits(mock_list: List[Model], source: VertexAISource) -> None:
131130
mock_models = gen_mock_models()

0 commit comments

Comments (0)