From 65224d6ad788ce85540d80b9870b7ec10f84b5f1 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 2 Dec 2021 21:48:42 -0800 Subject: [PATCH 01/17] feat: Made display_name optional for most calls --- google/cloud/aiplatform/datasets/dataset.py | 8 +- .../aiplatform/datasets/image_dataset.py | 7 +- .../aiplatform/datasets/tabular_dataset.py | 8 +- .../cloud/aiplatform/datasets/text_dataset.py | 8 +- .../datasets/time_series_dataset.py | 8 +- .../aiplatform/datasets/video_dataset.py | 8 +- google/cloud/aiplatform/jobs.py | 22 ++++-- google/cloud/aiplatform/models.py | 33 ++++++-- google/cloud/aiplatform/pipeline_jobs.py | 6 +- .../tensorboard/tensorboard_resource.py | 7 +- google/cloud/aiplatform/training_jobs.py | 75 ++++++++++++++----- 11 files changed, 139 insertions(+), 51 deletions(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 30f518fb71..6d552cac8d 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -15,6 +15,7 @@ # limitations under the License. # +import datetime from typing import Dict, List, Optional, Sequence, Tuple, Union from google.api_core import operation @@ -106,12 +107,12 @@ def _validate_metadata_schema_uri(self) -> None: @classmethod def create( cls, - display_name: str, metadata_schema_uri: str, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -125,7 +126,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
metadata_schema_uri (str): @@ -208,7 +209,8 @@ def create( dataset (Dataset): Instantiated representation of the managed dataset resource. """ - + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/image_dataset.py b/google/cloud/aiplatform/datasets/image_dataset.py index bebc75beab..0837997567 100644 --- a/google/cloud/aiplatform/datasets/image_dataset.py +++ b/google/cloud/aiplatform/datasets/image_dataset.py @@ -15,6 +15,7 @@ # limitations under the License. # +import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials @@ -36,7 +37,7 @@ class ImageDataset(datasets._Dataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, @@ -53,7 +54,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -126,6 +127,8 @@ def create( image_dataset (ImageDataset): Instantiated representation of the managed image dataset resource. """ + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/datasets/tabular_dataset.py b/google/cloud/aiplatform/datasets/tabular_dataset.py index 57ad827b31..57fcc9fd6e 100644 --- a/google/cloud/aiplatform/datasets/tabular_dataset.py +++ b/google/cloud/aiplatform/datasets/tabular_dataset.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials @@ -36,7 +37,7 @@ class TabularDataset(datasets._ColumnNamesDataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, project: Optional[str] = None, @@ -51,7 +52,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -107,7 +108,8 @@ def create( tabular_dataset (TabularDataset): Instantiated representation of the managed tabular dataset resource. """ - + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/text_dataset.py b/google/cloud/aiplatform/datasets/text_dataset.py index 140fd17335..61003572dc 100644 --- a/google/cloud/aiplatform/datasets/text_dataset.py +++ b/google/cloud/aiplatform/datasets/text_dataset.py @@ -15,6 +15,7 @@ # limitations under the License. # +import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials @@ -36,7 +37,7 @@ class TextDataset(datasets._Dataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, @@ -60,7 +61,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
gcs_source (Union[str, Sequence[str]]): @@ -133,7 +134,8 @@ def create( text_dataset (TextDataset): Instantiated representation of the managed text dataset resource. """ - + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/time_series_dataset.py b/google/cloud/aiplatform/datasets/time_series_dataset.py index aab96eda90..c07200557f 100644 --- a/google/cloud/aiplatform/datasets/time_series_dataset.py +++ b/google/cloud/aiplatform/datasets/time_series_dataset.py @@ -15,6 +15,7 @@ # limitations under the License. # +import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials @@ -36,7 +37,7 @@ class TimeSeriesDataset(datasets._ColumnNamesDataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, project: Optional[str] = None, @@ -51,7 +52,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -108,7 +109,8 @@ def create( Instantiated representation of the managed time series dataset resource. """ - + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/video_dataset.py b/google/cloud/aiplatform/datasets/video_dataset.py index 2964b77f19..564523255b 100644 --- a/google/cloud/aiplatform/datasets/video_dataset.py +++ b/google/cloud/aiplatform/datasets/video_dataset.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials @@ -36,7 +37,7 @@ class VideoDataset(datasets._Dataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, @@ -53,7 +54,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -126,7 +127,8 @@ def create( video_dataset (VideoDataset): Instantiated representation of the managed video dataset resource. """ - + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 2ce9d8bbb9..f657af6913 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -977,9 +977,9 @@ class CustomJob(_RunnableJob): def __init__( self, - display_name: str, worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]], base_output_dir: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1023,7 +1023,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of the HyperparameterTuningJob. + Optional. The user-defined name of the HyperparameterTuningJob. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
worker_pool_specs (Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]]): @@ -1081,6 +1081,11 @@ def __init__( staging_bucket, "aiplatform-custom-job" ) + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) + self._gca_resource = gca_custom_job_compat.CustomJob( display_name=display_name, job_spec=gca_custom_job_compat.CustomJobSpec( @@ -1133,7 +1138,6 @@ def _log_web_access_uris(self): @classmethod def from_local_script( cls, - display_name: str, script_path: str, container_uri: str, args: Optional[Sequence[str]] = None, @@ -1149,6 +1153,7 @@ def from_local_script( reduction_server_machine_type: Optional[str] = None, reduction_server_container_uri: Optional[str] = None, base_output_dir: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1176,7 +1181,7 @@ def from_local_script( Args: display_name (str): - Required. The user-defined name of this CustomJob. + Optional. The user-defined name of this CustomJob. script_path (str): Required. Local path to training script. container_uri (str): @@ -1452,7 +1457,6 @@ class HyperparameterTuningJob(_RunnableJob): def __init__( self, - display_name: str, custom_job: CustomJob, metric_spec: Dict[str, str], parameter_spec: Dict[str, hyperparameter_tuning._ParameterSpec], @@ -1461,6 +1465,7 @@ def __init__( max_failed_trial_count: int = 0, search_algorithm: Optional[str] = None, measurement_selection: Optional[str] = "best", + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1526,7 +1531,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of the HyperparameterTuningJob. + Optional. The user-defined name of the HyperparameterTuningJob. 
The name can be up to 128 characters long and can be consist of any UTF-8 characters. custom_job (aiplatform.CustomJob): @@ -1648,6 +1653,11 @@ def __init__( ], ) + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) + self._gca_resource = gca_hyperparameter_tuning_job_compat.HyperparameterTuningJob( display_name=display_name, study_spec=study_spec, diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index e4e2947bc6..b5f4d40cff 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import datetime import pathlib import proto import re @@ -194,7 +195,7 @@ def network(self) -> Optional[str]: @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, description: Optional[str] = None, labels: Optional[Dict[str, str]] = None, metadata: Optional[Sequence[Tuple[str, str]]] = (), @@ -208,7 +209,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Endpoint. + Optional. The user-defined name of the Endpoint. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
project (str): @@ -257,6 +258,9 @@ def create( api_client = cls._instantiate_client(location=location, credentials=credentials) + if not display_name: + display_name = "Endpoint " + datetime.datetime.now().isoformat(sep=" ") + utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -1574,7 +1578,6 @@ def update( @base.optional_sync() def upload( cls, - display_name: str, serving_container_image_uri: str, *, artifact_uri: Optional[str] = None, @@ -1590,6 +1593,7 @@ def upload( prediction_schema_uri: Optional[str] = None, explanation_metadata: Optional[explain.ExplanationMetadata] = None, explanation_parameters: Optional[explain.ExplanationParameters] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1611,7 +1615,7 @@ def upload( Args: display_name (str): - Required. The display name of the Model. The name can be up to 128 + Optional. The display name of the Model. The name can be up to 128 characters long and can be consist of any UTF-8 characters. serving_container_image_uri (str): Required. The URI of the Model serving container. @@ -1749,6 +1753,8 @@ def upload( is specified. Also if model directory does not contain a supported model file. """ + if not display_name: + display_name = "Model " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -2535,7 +2541,7 @@ def upload_xgboost_model_file( cls, model_file_path: str, xgboost_version: Optional[str] = None, - display_name: str = "XGBoost model", + display_name: Optional[str] = None, description: Optional[str] = None, instance_schema_uri: Optional[str] = None, parameters_schema_uri: Optional[str] = None, @@ -2665,6 +2671,9 @@ def upload_xgboost_model_file( is specified. Also if model directory does not contain a supported model file. 
""" + if not display_name: + display_name = "XGBoost model " + datetime.datetime.now().isoformat(sep=" ") + XGBOOST_SUPPORTED_MODEL_FILE_EXTENSIONS = [ ".pkl", ".joblib", @@ -2730,7 +2739,7 @@ def upload_scikit_learn_model_file( cls, model_file_path: str, sklearn_version: Optional[str] = None, - display_name: str = "Scikit-learn model", + display_name: Optional[str] = None, description: Optional[str] = None, instance_schema_uri: Optional[str] = None, parameters_schema_uri: Optional[str] = None, @@ -2861,6 +2870,11 @@ def upload_scikit_learn_model_file( is specified. Also if model directory does not contain a supported model file. """ + if not display_name: + display_name = "Scikit-Learn model " + datetime.datetime.now().isoformat( + sep=" " + ) + SKLEARN_SUPPORTED_MODEL_FILE_EXTENSIONS = [ ".pkl", ".joblib", @@ -2925,7 +2939,7 @@ def upload_tensorflow_saved_model( saved_model_dir: str, tensorflow_version: Optional[str] = None, use_gpu: bool = False, - display_name: str = "Tensorflow model", + display_name: Optional[str] = None, description: Optional[str] = None, instance_schema_uri: Optional[str] = None, parameters_schema_uri: Optional[str] = None, @@ -3058,6 +3072,11 @@ def upload_tensorflow_saved_model( is specified. Also if model directory does not contain a supported model file. 
""" + if not display_name: + display_name = "Tensorflow model " + datetime.datetime.now().isoformat( + sep=" " + ) + container_image_uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( region=location, framework="tensorflow", diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index c756589513..0088842040 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -88,8 +88,8 @@ class PipelineJob(base.VertexAiResourceNounWithFutureManager): def __init__( self, - display_name: str, template_path: str, + display_name: Optional[str], job_id: Optional[str] = None, pipeline_root: Optional[str] = None, parameter_values: Optional[Dict[str, Any]] = None, @@ -105,7 +105,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this Pipeline. + Optional. The user-defined name of this Pipeline. template_path (str): Required. The path of PipelineJob or PipelineSpec JSON file. It can be a local path or a Google Cloud Storage URI. @@ -156,6 +156,8 @@ def __init__( Raises: ValueError: If job_id or labels have incorrect format. """ + if not display_name: + display_name = "PipelineJob " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 5871bae832..83fb4a513f 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import datetime from typing import Dict, List, Optional, Sequence, Tuple from google.auth import credentials as auth_credentials @@ -82,7 +83,7 @@ def __init__( @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, description: Optional[str] = None, labels: Optional[Dict[str, str]] = None, project: Optional[str] = None, @@ -106,7 +107,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Tensorboard. + Optional. The user-defined name of the Tensorboard. The name can be up to 128 characters long and can be consist of any UTF-8 characters. description (str): @@ -148,6 +149,8 @@ def create( tensorboard (Tensorboard): Instantiated representation of the managed tensorboard resource. """ + if not display_name: + display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 38aafef4fa..3825e3b657 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -78,7 +78,7 @@ class _TrainingJob(base.VertexAiResourceNounWithFutureManager): def __init__( self, - display_name: str, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -90,7 +90,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. project (str): Optional project to retrieve model from. If not set, project set in aiplatform.init will be used. @@ -135,6 +135,10 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. 
""" + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -1013,7 +1017,6 @@ class _CustomTrainingJob(_TrainingJob): def __init__( self, - display_name: str, container_uri: str, model_serving_container_image_uri: Optional[str] = None, model_serving_container_predict_route: Optional[str] = None, @@ -1026,6 +1029,7 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1037,7 +1041,7 @@ def __init__( """ Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. container_uri (str): Required: Uri of the training container image in the GCR. model_serving_container_image_uri (str): @@ -1173,6 +1177,10 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. 
""" + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -1533,7 +1541,6 @@ class CustomTrainingJob(_CustomTrainingJob): def __init__( self, - display_name: str, script_path: str, container_uri: str, requirements: Optional[Sequence[str]] = None, @@ -1548,6 +1555,7 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1593,7 +1601,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. script_path (str): Required. Local path to training script. container_uri (str): Required: Uri of the training container image in the GCR. @@ -1732,6 +1740,10 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. 
""" + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -2340,7 +2352,6 @@ class CustomContainerTrainingJob(_CustomTrainingJob): def __init__( self, - display_name: str, container_uri: str, command: Sequence[str] = None, model_serving_container_image_uri: Optional[str] = None, @@ -2354,6 +2365,7 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -2398,7 +2410,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. container_uri (str): Required: Uri of the training container image in the GCR. command (Sequence[str]): @@ -2537,6 +2549,10 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. 
""" + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -3124,13 +3140,13 @@ class AutoMLTabularTrainingJob(_TrainingJob): def __init__( self, - display_name: str, optimization_prediction_type: str, optimization_objective: Optional[str] = None, column_specs: Optional[Dict[str, str]] = None, column_transformations: Optional[List[Dict[str, Dict[str, str]]]] = None, optimization_objective_recall_value: Optional[float] = None, optimization_objective_precision_value: Optional[float] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -3152,7 +3168,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. optimization_prediction_type (str): The type of prediction the Model is to produce. "classification" - Predict one out of multiple target values is @@ -3270,6 +3286,10 @@ def __init__( Raises: ValueError: If both column_transformations and column_specs were provided. """ + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -3760,7 +3780,7 @@ class AutoMLForecastingTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, optimization_objective: Optional[str] = None, column_specs: Optional[Dict[str, str]] = None, column_transformations: Optional[List[Dict[str, Dict[str, str]]]] = None, @@ -3775,7 +3795,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. optimization_objective (str): Optional. 
Objective function the model is to be optimized towards. The training process creates a Model that optimizes the value of the objective @@ -3857,6 +3877,10 @@ def __init__( Raises: ValueError: If both column_transformations and column_specs were provided. """ + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -4611,7 +4635,7 @@ class AutoMLImageTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, prediction_type: str = "classification", multi_label: bool = False, model_type: str = "CLOUD", @@ -4627,7 +4651,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. prediction_type (str): The type of prediction the Model is to produce, one of: "classification" - Predict one out of multiple target values is @@ -4723,6 +4747,10 @@ def __init__( Raises: ValueError: When an invalid prediction_type or model_type is provided. """ + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) valid_model_types = constants.AUTOML_IMAGE_PREDICTION_MODEL_TYPES.get( prediction_type, None @@ -5109,7 +5137,6 @@ class CustomPythonPackageTrainingJob(_CustomTrainingJob): def __init__( self, - display_name: str, python_package_gcs_uri: str, python_module_name: str, container_uri: str, @@ -5124,6 +5151,7 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -5311,6 +5339,10 @@ def __init__( Bucket used to stage source and training artifacts. 
Overrides staging_bucket set in aiplatform.init. """ + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, @@ -5886,7 +5918,7 @@ class AutoMLVideoTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, prediction_type: str = "classification", model_type: str = "CLOUD", project: Optional[str] = None, @@ -5980,6 +6012,11 @@ def __init__( Raises: ValueError: When an invalid prediction_type and/or model_type is provided. """ + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) + valid_model_types = constants.AUTOML_VIDEO_PREDICTION_MODEL_TYPES.get( prediction_type, None ) @@ -6253,10 +6290,10 @@ class AutoMLTextTrainingJob(_TrainingJob): def __init__( self, - display_name: str, prediction_type: str, multi_label: bool = False, sentiment_max: int = 10, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -6340,6 +6377,10 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. 
""" + if not display_name: + display_name = ( + self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") + ) super().__init__( display_name=display_name, project=project, From b2958ae93b463356ea9eb9a2ab79a0d5dc770111 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Feb 2022 18:43:31 -0800 Subject: [PATCH 02/17] Reverted the change to _Dataset.create interface --- google/cloud/aiplatform/datasets/dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 6d552cac8d..6af44edcb5 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -107,12 +107,12 @@ def _validate_metadata_schema_uri(self) -> None: @classmethod def create( cls, + display_name: str, metadata_schema_uri: str, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, From b0e19fe04a15c13c9a910de78aca147151b4cbf9 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Feb 2022 19:23:00 -0800 Subject: [PATCH 03/17] Extracted the display name generation to the _generate_display_name method --- google/cloud/aiplatform/base.py | 7 ++++ google/cloud/aiplatform/datasets/dataset.py | 2 +- .../aiplatform/datasets/image_dataset.py | 2 +- .../aiplatform/datasets/tabular_dataset.py | 2 +- .../cloud/aiplatform/datasets/text_dataset.py | 2 +- .../datasets/time_series_dataset.py | 2 +- .../aiplatform/datasets/video_dataset.py | 2 +- google/cloud/aiplatform/jobs.py | 8 +--- google/cloud/aiplatform/models.py | 14 +++---- google/cloud/aiplatform/pipeline_jobs.py | 2 +- .../tensorboard/tensorboard_resource.py | 2 +- google/cloud/aiplatform/training_jobs.py | 40 
+++++-------------- 12 files changed, 32 insertions(+), 53 deletions(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index a3e8c352b0..b45331a958 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -673,6 +673,13 @@ def to_dict(self) -> Dict[str, Any]: """Returns the resource proto as a dictionary.""" return json_format.MessageToDict(self.gca_resource._pb) + @classmethod + def _generate_display_name(cls, prefix: Optional[str] = None) -> str: + """Returns a display name containing class name and time string.""" + if not prefix: + prefix = cls.name + return prefix + " " + datetime.datetime.now().isoformat(sep=" ") + def optional_sync( construct_object_on_arg: Optional[str] = None, diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 6af44edcb5..500485693b 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -210,7 +210,7 @@ def create( Instantiated representation of the managed dataset resource. """ if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/image_dataset.py b/google/cloud/aiplatform/datasets/image_dataset.py index 0837997567..58806e3092 100644 --- a/google/cloud/aiplatform/datasets/image_dataset.py +++ b/google/cloud/aiplatform/datasets/image_dataset.py @@ -128,7 +128,7 @@ def create( Instantiated representation of the managed image dataset resource. 
""" if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/datasets/tabular_dataset.py b/google/cloud/aiplatform/datasets/tabular_dataset.py index 57fcc9fd6e..c7c415a05f 100644 --- a/google/cloud/aiplatform/datasets/tabular_dataset.py +++ b/google/cloud/aiplatform/datasets/tabular_dataset.py @@ -109,7 +109,7 @@ def create( Instantiated representation of the managed tabular dataset resource. """ if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/text_dataset.py b/google/cloud/aiplatform/datasets/text_dataset.py index 61003572dc..1303689e1c 100644 --- a/google/cloud/aiplatform/datasets/text_dataset.py +++ b/google/cloud/aiplatform/datasets/text_dataset.py @@ -135,7 +135,7 @@ def create( Instantiated representation of the managed text dataset resource. 
""" if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/time_series_dataset.py b/google/cloud/aiplatform/datasets/time_series_dataset.py index c07200557f..faba45a7a1 100644 --- a/google/cloud/aiplatform/datasets/time_series_dataset.py +++ b/google/cloud/aiplatform/datasets/time_series_dataset.py @@ -110,7 +110,7 @@ def create( """ if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/video_dataset.py b/google/cloud/aiplatform/datasets/video_dataset.py index 564523255b..66c8b61838 100644 --- a/google/cloud/aiplatform/datasets/video_dataset.py +++ b/google/cloud/aiplatform/datasets/video_dataset.py @@ -128,7 +128,7 @@ def create( Instantiated representation of the managed video dataset resource. 
""" if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index f657af6913..3ec9cf275a 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -1082,9 +1082,7 @@ def __init__( ) if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() self._gca_resource = gca_custom_job_compat.CustomJob( display_name=display_name, @@ -1654,9 +1652,7 @@ def __init__( ) if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() self._gca_resource = gca_hyperparameter_tuning_job_compat.HyperparameterTuningJob( display_name=display_name, diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index b5f4d40cff..0b2998c676 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -259,7 +259,7 @@ def create( api_client = cls._instantiate_client(location=location, credentials=credentials) if not display_name: - display_name = "Endpoint " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: @@ -1754,7 +1754,7 @@ def upload( Also if model directory does not contain a supported model file. """ if not display_name: - display_name = "Model " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -2672,7 +2672,7 @@ def upload_xgboost_model_file( Also if model directory does not contain a supported model file. 
""" if not display_name: - display_name = "XGBoost model " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls.__class__.__generate_display_name("XGBoost model") XGBOOST_SUPPORTED_MODEL_FILE_EXTENSIONS = [ ".pkl", @@ -2871,9 +2871,7 @@ def upload_scikit_learn_model_file( Also if model directory does not contain a supported model file. """ if not display_name: - display_name = "Scikit-Learn model " + datetime.datetime.now().isoformat( - sep=" " - ) + display_name = cls._generate_display_name("Scikit-Learn model") SKLEARN_SUPPORTED_MODEL_FILE_EXTENSIONS = [ ".pkl", @@ -3073,9 +3071,7 @@ def upload_tensorflow_saved_model( Also if model directory does not contain a supported model file. """ if not display_name: - display_name = "Tensorflow model " + datetime.datetime.now().isoformat( - sep=" " - ) + display_name = cls._generate_display_name("Tensorflow model") container_image_uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( region=location, diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index 0088842040..f7f6492708 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -157,7 +157,7 @@ def __init__( ValueError: If job_id or labels have incorrect format. """ if not display_name: - display_name = "PipelineJob " + datetime.datetime.now().isoformat(sep=" ") + display_name = self.__class__._generate_display_name() utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 83fb4a513f..0e5160c2be 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -150,7 +150,7 @@ def create( Instantiated representation of the managed tensorboard resource. 
""" if not display_name: - display_name = cls.name + " " + datetime.datetime.now().isoformat(sep=" ") + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 3825e3b657..903384ad39 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -136,9 +136,7 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -1178,9 +1176,7 @@ def __init__( staging_bucket set in aiplatform.init. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -1741,9 +1737,7 @@ def __init__( staging_bucket set in aiplatform.init. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -2550,9 +2544,7 @@ def __init__( staging_bucket set in aiplatform.init. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -3287,9 +3279,7 @@ def __init__( ValueError: If both column_transformations and column_specs were provided. 
""" if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -3878,9 +3868,7 @@ def __init__( ValueError: If both column_transformations and column_specs were provided. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -4748,9 +4736,7 @@ def __init__( ValueError: When an invalid prediction_type or model_type is provided. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() valid_model_types = constants.AUTOML_IMAGE_PREDICTION_MODEL_TYPES.get( prediction_type, None @@ -5340,9 +5326,7 @@ def __init__( staging_bucket set in aiplatform.init. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -6013,9 +5997,7 @@ def __init__( ValueError: When an invalid prediction_type and/or model_type is provided. """ if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() valid_model_types = constants.AUTOML_VIDEO_PREDICTION_MODEL_TYPES.get( prediction_type, None @@ -6378,9 +6360,7 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. 
""" if not display_name: - display_name = ( - self.__class__.name + " " + datetime.datetime.now().isoformat(sep=" ") - ) + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, From 47563a0b9cd7b9a01bcd042e0c8d339310057497 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Feb 2022 19:32:12 -0800 Subject: [PATCH 04/17] Made Model.batch_predict(job_display_name=) optional --- google/cloud/aiplatform/jobs.py | 2 ++ google/cloud/aiplatform/models.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 3ec9cf275a..befc0f3dab 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -527,6 +527,8 @@ def create( (jobs.BatchPredictionJob): Instantiated representation of the created batch prediction job. """ + if not job_display_name: + job_display_name = cls._generate_display_name() utils.validate_display_name(job_display_name) diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index 0b2998c676..a233a6d6b2 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -2145,7 +2145,7 @@ def _deploy( def batch_predict( self, - job_display_name: str, + job_display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, bigquery_source: Optional[str] = None, instances_format: str = "jsonl", @@ -2182,7 +2182,7 @@ def batch_predict( Args: job_display_name (str): - Required. The user-defined name of the BatchPredictionJob. + Optional. The user-defined name of the BatchPredictionJob. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
gcs_source: Optional[Sequence[str]] = None From b3234dcfab2a1fe223d30df27c02d70006df972b Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Feb 2022 19:42:46 -0800 Subject: [PATCH 05/17] Removed the unused imports --- google/cloud/aiplatform/datasets/dataset.py | 1 - google/cloud/aiplatform/datasets/image_dataset.py | 1 - google/cloud/aiplatform/datasets/tabular_dataset.py | 1 - google/cloud/aiplatform/datasets/text_dataset.py | 1 - google/cloud/aiplatform/datasets/time_series_dataset.py | 1 - google/cloud/aiplatform/datasets/video_dataset.py | 1 - google/cloud/aiplatform/models.py | 1 - google/cloud/aiplatform/tensorboard/tensorboard_resource.py | 1 - 8 files changed, 8 deletions(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 500485693b..8132effdb7 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, List, Optional, Sequence, Tuple, Union from google.api_core import operation diff --git a/google/cloud/aiplatform/datasets/image_dataset.py b/google/cloud/aiplatform/datasets/image_dataset.py index 58806e3092..918ae92417 100644 --- a/google/cloud/aiplatform/datasets/image_dataset.py +++ b/google/cloud/aiplatform/datasets/image_dataset.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials diff --git a/google/cloud/aiplatform/datasets/tabular_dataset.py b/google/cloud/aiplatform/datasets/tabular_dataset.py index c7c415a05f..f5b8b2c1a8 100644 --- a/google/cloud/aiplatform/datasets/tabular_dataset.py +++ b/google/cloud/aiplatform/datasets/tabular_dataset.py @@ -15,7 +15,6 @@ # limitations under the License. 
# -import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials diff --git a/google/cloud/aiplatform/datasets/text_dataset.py b/google/cloud/aiplatform/datasets/text_dataset.py index 1303689e1c..f84688129c 100644 --- a/google/cloud/aiplatform/datasets/text_dataset.py +++ b/google/cloud/aiplatform/datasets/text_dataset.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials diff --git a/google/cloud/aiplatform/datasets/time_series_dataset.py b/google/cloud/aiplatform/datasets/time_series_dataset.py index faba45a7a1..ec5546f12a 100644 --- a/google/cloud/aiplatform/datasets/time_series_dataset.py +++ b/google/cloud/aiplatform/datasets/time_series_dataset.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials diff --git a/google/cloud/aiplatform/datasets/video_dataset.py b/google/cloud/aiplatform/datasets/video_dataset.py index 66c8b61838..4290d68877 100644 --- a/google/cloud/aiplatform/datasets/video_dataset.py +++ b/google/cloud/aiplatform/datasets/video_dataset.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, Optional, Sequence, Tuple, Union from google.auth import credentials as auth_credentials diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index a233a6d6b2..7ed2599a56 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import datetime import pathlib import proto import re diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 0e5160c2be..98d572e5b5 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -15,7 +15,6 @@ # limitations under the License. # -import datetime from typing import Dict, List, Optional, Sequence, Tuple from google.auth import credentials as auth_credentials From c0d2aee939d8871c2e9ad5c15271a39c63ad96e7 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Feb 2022 19:55:00 -0800 Subject: [PATCH 06/17] Added a unit test --- tests/unit/aiplatform/test_datasets.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index 71ca5907ab..351b9954a0 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -1104,6 +1104,21 @@ def test_create_dataset(self, create_dataset_mock, sync): metadata=_TEST_REQUEST_METADATA, ) + @pytest.mark.usefixtures("get_dataset_tabular_bq_mock") + @pytest.mark.parametrize("sync", [True, False]) + def test_create_dataset_with_default_display_name(self, create_dataset_mock, sync): + + my_dataset = datasets.TabularDataset.create( + bq_source=_TEST_SOURCE_URI_BQ, + sync=sync, + ) + + if not sync: + my_dataset.wait() + + create_dataset_mock.assert_called_once() + create_dataset_mock.call_args[1]["display_name"].startswith("TabularDataset ") + @pytest.mark.usefixtures("get_dataset_tabular_bq_mock") def test_no_import_data_method(self): From f08c19b1eb94e60eedb1dd5bd0f4a1c148e871bf Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Fri, 4 Feb 2022 17:28:50 -0800 Subject: [PATCH 07/17] Fixed getting the class name. 
--- google/cloud/aiplatform/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index b45331a958..dcec7bb5d6 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -677,7 +677,7 @@ def to_dict(self) -> Dict[str, Any]: def _generate_display_name(cls, prefix: Optional[str] = None) -> str: """Returns a display name containing class name and time string.""" if not prefix: - prefix = cls.name + prefix = cls.__name__ return prefix + " " + datetime.datetime.now().isoformat(sep=" ") From 9e5a800d77fd6983cb2d8a8c99a35cf8c94ea4da Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Sun, 6 Feb 2022 18:00:32 -0800 Subject: [PATCH 08/17] Fixed code style --- tests/unit/aiplatform/test_datasets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index 351b9954a0..36cd035460 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -1109,8 +1109,7 @@ def test_create_dataset(self, create_dataset_mock, sync): def test_create_dataset_with_default_display_name(self, create_dataset_mock, sync): my_dataset = datasets.TabularDataset.create( - bq_source=_TEST_SOURCE_URI_BQ, - sync=sync, + bq_source=_TEST_SOURCE_URI_BQ, sync=sync, ) if not sync: From 1cfc9c17e257158455ff1998c91d65e3c143a1b7 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Sun, 6 Feb 2022 23:35:48 -0800 Subject: [PATCH 09/17] Fixed the unit test --- tests/unit/aiplatform/test_datasets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index 36cd035460..60256e5d23 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -1116,7 +1116,9 @@ def test_create_dataset_with_default_display_name(self, create_dataset_mock, syn 
my_dataset.wait() create_dataset_mock.assert_called_once() - create_dataset_mock.call_args[1]["display_name"].startswith("TabularDataset ") + create_dataset_mock.call_args[1]["dataset"].display_name.startswith( + "TabularDataset " + ) @pytest.mark.usefixtures("get_dataset_tabular_bq_mock") def test_no_import_data_method(self): From 00977bea52fec727c97d2fb068aa237ece9907d2 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 24 Feb 2022 21:33:34 -0800 Subject: [PATCH 10/17] Restore the parameter ordering as requested by reviewer The parameter default values had to be removed due to Python's syntax. --- google/cloud/aiplatform/training_jobs.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 417fc57a49..2aeabd37f0 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -1015,6 +1015,8 @@ class _CustomTrainingJob(_TrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], container_uri: str, model_serving_container_image_uri: Optional[str] = None, model_serving_container_predict_route: Optional[str] = None, @@ -1027,7 +1029,6 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1554,6 +1555,8 @@ class CustomTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], script_path: str, container_uri: str, requirements: Optional[Sequence[str]] = None, @@ -1568,7 +1571,6 @@ def __init__( model_instance_schema_uri: Optional[str] = None, 
model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -2385,6 +2387,8 @@ class CustomContainerTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], container_uri: str, command: Sequence[str] = None, model_serving_container_image_uri: Optional[str] = None, @@ -2398,7 +2402,6 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -3193,13 +3196,14 @@ class AutoMLTabularTrainingJob(_TrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], optimization_prediction_type: str, optimization_objective: Optional[str] = None, column_specs: Optional[Dict[str, str]] = None, column_transformations: Optional[List[Dict[str, Dict[str, str]]]] = None, optimization_objective_recall_value: Optional[float] = None, optimization_objective_precision_value: Optional[float] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -5184,6 +5188,8 @@ class CustomPythonPackageTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], python_package_gcs_uri: str, python_module_name: str, container_uri: str, @@ -5198,7 +5204,6 @@ def __init__( model_instance_schema_uri: Optional[str] = None, model_parameters_schema_uri: 
Optional[str] = None, model_prediction_schema_uri: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -6355,10 +6360,11 @@ class AutoMLTextTrainingJob(_TrainingJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], prediction_type: str, multi_label: bool = False, sentiment_max: int = 10, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, From 8c8c8aa1de68a0ff21f3bf90ce608cdab3c24a5f Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Mar 2022 02:41:49 -0800 Subject: [PATCH 11/17] Marked the BatchPredictJob.create job_display_name parameter as optional --- google/cloud/aiplatform/jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index befc0f3dab..6c252eb30b 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -344,7 +344,7 @@ def completion_stats(self) -> Optional[gca_completion_stats.CompletionStats]: @classmethod def create( cls, - job_display_name: str, + job_display_name: Optional[str], model_name: Union[str, "aiplatform.Model"], instances_format: str = "jsonl", predictions_format: str = "jsonl", @@ -374,7 +374,7 @@ def create( Args: job_display_name (str): - Required. The user-defined name of the BatchPredictionJob. + Optional. The user-defined name of the BatchPredictionJob. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
model_name (Union[str, aiplatform.Model]): From dfd164dbb1c0a173ffb7e5f22d8e2bc8e710b849 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Mon, 7 Mar 2022 15:14:50 -0800 Subject: [PATCH 12/17] Restored the parameter order in jobs.py as asked by reviewer --- google/cloud/aiplatform/jobs.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 5b21a8464a..07710c4062 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -1012,9 +1012,10 @@ class CustomJob(_RunnableJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]], base_output_dir: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1171,6 +1172,8 @@ def _log_web_access_uris(self): @classmethod def from_local_script( cls, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], script_path: str, container_uri: str, args: Optional[Sequence[str]] = None, @@ -1186,7 +1189,6 @@ def from_local_script( reduction_server_machine_type: Optional[str] = None, reduction_server_container_uri: Optional[str] = None, base_output_dir: Optional[str] = None, - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -1490,6 +1492,8 @@ class HyperparameterTuningJob(_RunnableJob): def __init__( self, + # TODO: Make display_name parameter fully optional in next major release + display_name: Optional[str], custom_job: CustomJob, metric_spec: Dict[str, str], parameter_spec: Dict[str, hyperparameter_tuning._ParameterSpec], @@ -1498,7 +1502,6 @@ def __init__( max_failed_trial_count: 
int = 0, search_algorithm: Optional[str] = None, measurement_selection: Optional[str] = "best", - display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, From 93acea098e01cc9931a403037c309217cd391be2 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Mon, 7 Mar 2022 15:16:16 -0800 Subject: [PATCH 13/17] Added bug link to the TODO comments --- google/cloud/aiplatform/jobs.py | 6 +++--- google/cloud/aiplatform/training_jobs.py | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 07710c4062..81dd3cd3b9 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -1012,7 +1012,7 @@ class CustomJob(_RunnableJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]], base_output_dir: Optional[str] = None, @@ -1172,7 +1172,7 @@ def _log_web_access_uris(self): @classmethod def from_local_script( cls, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], script_path: str, container_uri: str, @@ -1492,7 +1492,7 @@ class HyperparameterTuningJob(_RunnableJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], custom_job: CustomJob, metric_spec: Dict[str, str], diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 06c495a0dc..c5dfa2711e 100644 --- 
a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -1015,7 +1015,7 @@ class _CustomTrainingJob(_TrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], container_uri: str, model_serving_container_image_uri: Optional[str] = None, @@ -1555,7 +1555,7 @@ class CustomTrainingJob(_CustomTrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], script_path: str, container_uri: str, @@ -2387,7 +2387,7 @@ class CustomContainerTrainingJob(_CustomTrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], container_uri: str, command: Sequence[str] = None, @@ -3196,7 +3196,7 @@ class AutoMLTabularTrainingJob(_TrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], optimization_prediction_type: str, optimization_objective: Optional[str] = None, @@ -5004,7 +5004,7 @@ class CustomPythonPackageTrainingJob(_CustomTrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: Optional[str], python_package_gcs_uri: str, python_module_name: str, @@ -6176,7 +6176,7 @@ class AutoMLTextTrainingJob(_TrainingJob): def __init__( self, - # TODO: Make display_name parameter fully optional in next major release + # TODO(b/223262536): Make 
display_name parameter fully optional in next major release display_name: Optional[str], prediction_type: str, multi_label: bool = False, From 348b51c4f702286bc22fda988a615eee849a9428 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Mon, 7 Mar 2022 15:19:19 -0800 Subject: [PATCH 14/17] Annotated _Dataset.create(display_name) as Optional --- google/cloud/aiplatform/datasets/dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 8132effdb7..8f08eaa5b3 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -106,7 +106,7 @@ def _validate_metadata_schema_uri(self) -> None: @classmethod def create( cls, - display_name: str, + display_name: Optional[str], metadata_schema_uri: str, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, From 57b12412ff5b79bf5819f82cd807491a48cfcf93 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 16 Mar 2022 22:06:47 -0700 Subject: [PATCH 15/17] Changed docstring as asked by the reviewer --- google/cloud/aiplatform/datasets/dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 8f08eaa5b3..223a595f61 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -125,7 +125,7 @@ def create( Args: display_name (str): - Optional. The user-defined name of the Dataset. + Required. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
metadata_schema_uri (str): From fee7b89f553c5e8b682c479578b31b05c30d76cd Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 31 Mar 2022 19:15:48 -0700 Subject: [PATCH 16/17] Implemented the reviewer request: Removed the Optional[str] annotations where default value cannot be specified --- google/cloud/aiplatform/datasets/dataset.py | 3 ++- google/cloud/aiplatform/jobs.py | 17 +++++++++-------- google/cloud/aiplatform/pipeline_jobs.py | 5 +++-- google/cloud/aiplatform/training_jobs.py | 20 ++++++++++---------- 4 files changed, 24 insertions(+), 21 deletions(-) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 223a595f61..a564b507dc 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -106,7 +106,8 @@ def _validate_metadata_schema_uri(self) -> None: @classmethod def create( cls, - display_name: Optional[str], + # TODO(b/223262536): Make the display_name parameter optional in the next major release + display_name: str, metadata_schema_uri: str, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 81dd3cd3b9..ce0325ef18 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -344,7 +344,8 @@ def completion_stats(self) -> Optional[gca_completion_stats.CompletionStats]: @classmethod def create( cls, - job_display_name: Optional[str], + # TODO(b/223262536): Make the job_display_name parameter optional in the next major release + job_display_name: str, model_name: Union[str, "aiplatform.Model"], instances_format: str = "jsonl", predictions_format: str = "jsonl", @@ -374,7 +375,7 @@ def create( Args: job_display_name (str): - Optional. The user-defined name of the BatchPredictionJob. + Required. The user-defined name of the BatchPredictionJob. 
The name can be up to 128 characters long and can be consist of any UTF-8 characters. model_name (Union[str, aiplatform.Model]): @@ -1013,7 +1014,7 @@ class CustomJob(_RunnableJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]], base_output_dir: Optional[str] = None, project: Optional[str] = None, @@ -1059,7 +1060,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of the HyperparameterTuningJob. + Required. The user-defined name of the HyperparameterTuningJob. The name can be up to 128 characters long and can be consist of any UTF-8 characters. worker_pool_specs (Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]]): @@ -1173,7 +1174,7 @@ def _log_web_access_uris(self): def from_local_script( cls, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, script_path: str, container_uri: str, args: Optional[Sequence[str]] = None, @@ -1216,7 +1217,7 @@ def from_local_script( Args: display_name (str): - Optional. The user-defined name of this CustomJob. + Required. The user-defined name of this CustomJob. script_path (str): Required. Local path to training script. container_uri (str): @@ -1493,7 +1494,7 @@ class HyperparameterTuningJob(_RunnableJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, custom_job: CustomJob, metric_spec: Dict[str, str], parameter_spec: Dict[str, hyperparameter_tuning._ParameterSpec], @@ -1567,7 +1568,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of the HyperparameterTuningJob. + Required. The user-defined name of the HyperparameterTuningJob. 
The name can be up to 128 characters long and can be consist of any UTF-8 characters. custom_job (aiplatform.CustomJob): diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index 8e4f4c832c..60acd553af 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -89,8 +89,9 @@ class PipelineJob(base.VertexAiResourceNounWithFutureManager): def __init__( self, + # TODO(b/223262536): Make the display_name parameter optional in the next major release + display_name: str, template_path: str, - display_name: Optional[str], job_id: Optional[str] = None, pipeline_root: Optional[str] = None, parameter_values: Optional[Dict[str, Any]] = None, @@ -106,7 +107,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of this Pipeline. + Required. The user-defined name of this Pipeline. template_path (str): Required. The path of PipelineJob or PipelineSpec JSON file. It can be a local path or a Google Cloud Storage URI. diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index c5dfa2711e..e904139efe 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -1016,7 +1016,7 @@ class _CustomTrainingJob(_TrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, container_uri: str, model_serving_container_image_uri: Optional[str] = None, model_serving_container_predict_route: Optional[str] = None, @@ -1040,7 +1040,7 @@ def __init__( """ Args: display_name (str): - Optional. The user-defined name of this TrainingPipeline. + Required. The user-defined name of this TrainingPipeline. container_uri (str): Required: Uri of the training container image in the GCR. 
model_serving_container_image_uri (str): @@ -1556,7 +1556,7 @@ class CustomTrainingJob(_CustomTrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, script_path: str, container_uri: str, requirements: Optional[Sequence[str]] = None, @@ -1616,7 +1616,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of this TrainingPipeline. + Required. The user-defined name of this TrainingPipeline. script_path (str): Required. Local path to training script. container_uri (str): Required: Uri of the training container image in the GCR. @@ -2388,7 +2388,7 @@ class CustomContainerTrainingJob(_CustomTrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, container_uri: str, command: Sequence[str] = None, model_serving_container_image_uri: Optional[str] = None, @@ -2446,7 +2446,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of this TrainingPipeline. + Required. The user-defined name of this TrainingPipeline. container_uri (str): Required: Uri of the training container image in the GCR. command (Sequence[str]): @@ -3197,7 +3197,7 @@ class AutoMLTabularTrainingJob(_TrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, optimization_prediction_type: str, optimization_objective: Optional[str] = None, column_specs: Optional[Dict[str, str]] = None, @@ -3225,7 +3225,7 @@ def __init__( Args: display_name (str): - Optional. The user-defined name of this TrainingPipeline. + Required. The user-defined name of this TrainingPipeline. optimization_prediction_type (str): The type of prediction the Model is to produce. 
"classification" - Predict one out of multiple target values is @@ -5005,7 +5005,7 @@ class CustomPythonPackageTrainingJob(_CustomTrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, python_package_gcs_uri: str, python_module_name: str, container_uri: str, @@ -6177,7 +6177,7 @@ class AutoMLTextTrainingJob(_TrainingJob): def __init__( self, # TODO(b/223262536): Make display_name parameter fully optional in next major release - display_name: Optional[str], + display_name: str, prediction_type: str, multi_label: bool = False, sentiment_max: int = 10, From 92c1eafe41ebf99949493961d4210323605f0821 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 31 Mar 2022 20:56:10 -0700 Subject: [PATCH 17/17] Fixed code formatting --- tests/unit/aiplatform/test_datasets.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index 03b63410b0..ae75001d96 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -1121,7 +1121,8 @@ def test_create_dataset(self, create_dataset_mock, sync): def test_create_dataset_with_default_display_name(self, create_dataset_mock, sync): my_dataset = datasets.TabularDataset.create( - bq_source=_TEST_SOURCE_URI_BQ, sync=sync, + bq_source=_TEST_SOURCE_URI_BQ, + sync=sync, ) if not sync: