@@ -330,60 +330,6 @@ def _make_ml_model_group_urn(self, model: Model) -> str:
         )
         return urn
 
-    # def _gen_data_process_workunits(
-    #     self, job: VertexAiResourceNoun
-    # ) -> Iterable[MetadataWorkUnit]:
-    #     """
-    #     Generate a work unit for VertexAI Training Job
-    #     """
-    #
-    #     created_time = (
-    #         int(job.create_time.timestamp() * 1000)
-    #         if job.create_time
-    #         else int(time.time() * 1000)
-    #     )
-    #     created_actor = f"urn:li:platformResource:{self.platform}"
-    #
-    #     job_id = self._make_vertexai_job_name(entity_id=job.name)
-    #     job_urn = builder.make_data_process_instance_urn(job_id)
-    #
-    #     aspects: List[_Aspect] = list()
-    #     aspects.append(
-    #         DataProcessInstancePropertiesClass(
-    #             name=job_id,
-    #             created=AuditStampClass(
-    #                 time=created_time,
-    #                 actor=created_actor,
-    #             ),
-    #             externalUrl=self._make_job_external_url(job),
-    #             customProperties={
-    #                 "displayName": job.display_name,
-    #                 "jobType": job.__class__.__name__,
-    #             },
-    #         )
-    #     )
-    #
-    #     aspects.append(
-    #         MLTrainingRunProperties(
-    #             externalUrl=self._make_job_external_url(job), id=job.name
-    #         )
-    #     )
-    #     aspects.append(SubTypesClass(typeNames=[MLTypes.TRAINING_JOB]))
-    #
-    #     aspects.append(ContainerClass(container=self._get_project_container().as_urn()))
-    #
-    #     # TODO add status of the job
-    #     # aspects.append(
-    #     #     DataProcessInstanceRunEventClass(
-    #     #         status=DataProcessRunStatusClass.COMPLETE,
-    #     #         timestampMillis=0
-    #     #     )
-    #     # }
-    #
-    #     yield from auto_workunit(
-    #         MetadataChangeProposalWrapper.construct_many(job_urn, aspects=aspects)
-    #     )
-
     def _get_project_container(self) -> ProjectIdKey:
         return ProjectIdKey(project_id=self.config.project_id, platform=self.platform)
@@ -404,36 +350,6 @@ def _search_model_version(
                 return version
         return None
 
-    # def _get_job_output_workunits(
-    #     self, job: VertexAiResourceNoun
-    # ) -> Iterable[MetadataWorkUnit]:
-    #     """
-    #     This method creates work units that link the training job to the model version
-    #     that it produces. It checks if the job configuration contains a model to upload,
-    #     and if so, it generates a work unit for the model version with the training job
-    #     as part of its properties.
-    #     """
-    #
-    #     job_conf = job.to_dict()
-    #     if (
-    #         "modelToUpload" in job_conf
-    #         and "name" in job_conf["modelToUpload"]
-    #         and job_conf["modelToUpload"]["name"]
-    #     ):
-    #         model_version_str = job_conf["modelToUpload"]["versionId"]
-    #         job_urn = self._make_job_urn(job)
-    #
-    #         model = Model(model_name=job_conf["modelToUpload"]["name"])
-    #         model_version = self._search_model_version(model, model_version_str)
-    #         if model and model_version:
-    #             logger.info(
-    #                 f"Found output model (name:{model.display_name} id:{model_version_str}) "
-    #                 f"for training job: {job.display_name}"
-    #             )
-    #             yield from self._gen_ml_model_endpoint_workunits(
-    #                 model, model_version, job_urn
-    #             )
-
     def _search_dataset(self, dataset_id: str) -> Optional[VertexAiResourceNoun]:
         """
         Search for a dataset by its ID in Vertex AI.
@@ -491,30 +407,6 @@ def _get_dataset_workunits(
             MetadataChangeProposalWrapper.construct_many(dataset_urn, aspects=aspects)
         )
 
-    # def _get_job_input_workunits(
-    #     self, job: VertexAiResourceNoun
-    # ) -> Iterable[MetadataWorkUnit]:
-    #     """
-    #     Generate work units for the input data of a training job.
-    #     This method checks if the training job is an AutoML job and if it has an input dataset
-    #     configuration. If so, it creates a work unit for the input dataset.
-    #     """
-    #
-    #     if self._is_automl_job(job):
-    #         job_conf = job.to_dict()
-    #         if (
-    #             "inputDataConfig" in job_conf
-    #             and "datasetId" in job_conf["inputDataConfig"]
-    #         ):
-    #             # Create URN of Input Dataset for Training Job
-    #             dataset_id = job_conf["inputDataConfig"]["datasetId"]
-    #             logger.info(
-    #                 f"Found input dataset (id: {dataset_id}) for training job ({job.display_name})"
-    #             )
-    #
-    #             if dataset_id:
-    #                 yield from self._gen_input_dataset_workunits(job, dataset_id)
-
     def _get_training_job_metadata(
         self, job: VertexAiResourceNoun
     ) -> TrainingJobMetadata:
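
For context on the pattern the removed comment blocks were sketching, below is a minimal, standalone sketch of emitting a training job as a DataProcessInstance with the DataHub SDK: build the instance URN, attach a properties aspect and a subtype aspect, and wrap each aspect in a MetadataChangeProposalWrapper. This is illustrative only and not code from this change; the job id, actor string, custom properties, and "Training Job" subtype value are assumptions, while the SDK calls themselves (make_data_process_instance_urn, DataProcessInstancePropertiesClass, AuditStampClass, SubTypesClass, MetadataChangeProposalWrapper.construct_many) are the same ones visible in the deleted code.

import time

import datahub.emitter.mce_builder as builder
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import (
    AuditStampClass,
    DataProcessInstancePropertiesClass,
    SubTypesClass,
)

# Assumed values for illustration; in the connector these are derived from the
# Vertex AI job object and the configured platform.
job_id = "vertexai.my-training-job"
job_urn = builder.make_data_process_instance_urn(job_id)

aspects = [
    DataProcessInstancePropertiesClass(
        name=job_id,
        created=AuditStampClass(
            time=int(time.time() * 1000),
            actor="urn:li:platformResource:vertexai",
        ),
        customProperties={"jobType": "CustomJob"},
    ),
    SubTypesClass(typeNames=["Training Job"]),
]

# One MCP per aspect, all targeting the same DataProcessInstance entity.
for mcp in MetadataChangeProposalWrapper.construct_many(job_urn, aspects=aspects):
    print(mcp.entityUrn, type(mcp.aspect).__name__)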